In rule-based code we often end up classifying something by stacking interval checks on several attributes. Take pricing second-hand goods: the items are not new and have no SKU, but the basic parameters are still there. To grade an item's condition, the real work is choosing a set of parameters, then collecting those parameter values together with their known condition labels into a dataset and training a machine learning model on it, so the machine derives the rules itself.
Once the machine has learned the rules, we only need to feed it new parameter values and it will classify the condition. That is just an analogy, and since I have no second-hand goods dataset I used a penguin species dataset instead; search online for the "Palmer Penguins dataset" and you can download it. What follows is still hands-on material; a more theory-oriented write-up may come later.
1. Background
The dataset was created by Dr. Kristen Gorman together with the Palmer Station LTER in Antarctica and contains records for 344 penguins.
2. Data description
species: one of three penguin species: Adelie, Gentoo, Chinstrap
culmen_length_mm: culmen (bill ridge) length, in millimetres
culmen_depth_mm: culmen (bill ridge) depth, in millimetres
flipper_length_mm: flipper length, in millimetres
body_mass_g: body mass, in grams
island: name of the island where the penguin was observed
sex: the penguin's sex
Before training, tensorflow.js needs the original dataset converted into tensor form. For training quality the feature values are best kept in a small range (ideally between 0 and 1), so the converted tensors may also need to be normalized. How you do this preprocessing is up to you; since the Palmer Penguins dataset ships as a CSV file, I converted it with plain JavaScript.
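As an illustration, here is a minimal sketch of that conversion for a single CSV row. It assumes the columns appear in the order species, island, culmen_length_mm, culmen_depth_mm, flipper_length_mm, body_mass_g, sex; the helper name parseRow and the scale factors are my own, chosen to match the scaled values in the array below, and are not code from the original project.

// Sketch only: map one CSV line of the Palmer Penguins file into the
// scaled numeric row format used in IRIS_DATA below.
const SPECIES = ['Adelie', 'Chinstrap', 'Gentoo'];
const ISLANDS = ['Torgersen', 'Biscoe', 'Dream'];
const SEXES = ['MALE', 'FEMALE'];

function parseRow(line) {
  const [species, island, culmenLen, culmenDep, flipperLen, bodyMass, sex] = line.split(',');
  return [
    SPECIES.indexOf(species),   // 0 / 1 / 2
    ISLANDS.indexOf(island),    // 0 / 1 / 2
    Number(culmenLen) / 10,     // mm, scaled to roughly 3-6
    Number(culmenDep) / 10,     // mm, scaled to roughly 1-2
    Number(flipperLen) / 100,   // mm, scaled to roughly 1.7-2.3
    Number(bodyMass) / 1000,    // g, scaled to roughly 2.7-6.3
    SEXES.indexOf(sex)          // 0 = MALE, 1 = FEMALE
  ];
}

// parseRow('Adelie,Torgersen,39.1,18.7,181,3750,MALE')
// => [0, 0, 3.91, 1.87, 1.81, 3.75, 0]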
In each row of the array, index 0 is the species (0 = Adelie, 1 = Chinstrap, 2 = Gentoo), index 1 is the island (0 = Torgersen, 1 = Biscoe, 2 = Dream), and indices 2 to 6 are the culmen length, culmen depth, flipper length, body mass and sex. Lengths are measured in millimetres and body mass in grams, so the raw numbers are fairly large; in the converted data below the culmen measurements have been divided by 10, the flipper length by 100 and the body mass by 1000. The converted data is as follows.
const IRIS_DATA = [ [0,0,3.91,1.8699999999999999,1.81,3.75,0], [0,0,3.95,1.7399999999999998,1.86,3.8,1], [0,0,4.029999999999999,1.8,1.95,3.25,1], [0,0,3.6700000000000004,1.9300000000000002,1.93,3.45,1], [0,0,3.9299999999999997,2.06,1.9,3.65,0], [0,0,3.8899999999999997,1.78,1.81,3.625,1], [0,0,3.9200000000000004,1.9600000000000002,1.95,4.675,0], [0,0,4.11,1.7600000000000002,1.82,3.2,1], [0,0,3.8600000000000003,2.12,1.91,3.8,0], [0,0,3.46,2.1100000000000003,1.98,4.4,0], [0,0,3.66,1.78,1.85,3.7,1], [0,0,3.87,1.9,1.95,3.45,1], [0,0,4.25,2.07,1.97,4.5,0], [0,0,3.44,1.8399999999999999,1.84,3.325,1], [0,0,4.6,2.15,1.94,4.2,0], [0,1,3.78,1.83,1.74,3.4,1], [0,1,3.7700000000000005,1.8699999999999999,1.8,3.6,0], [0,1,3.59,1.92,1.89,3.8,1], [0,1,3.8200000000000003,1.81,1.85,3.95,0], [0,1,3.88,1.72,1.8,3.8,0], [0,1,3.53,1.89,1.87,3.8,1], [0,1,4.0600000000000005,1.86,1.83,3.55,0], [0,1,4.05,1.7899999999999998,1.87,3.2,1], [0,1,3.79,1.86,1.72,3.15,1], [0,1,4.05,1.89,1.8,3.95,0], [0,2,3.95,1.67,1.78,3.25,1], [0,2,3.72,1.81,1.78,3.9,0], [0,2,3.95,1.78,1.88,3.3,1], [0,2,4.09,1.89,1.84,3.9,0], [0,2,3.6399999999999997,1.7,1.95,3.325,1], [0,2,3.9200000000000004,2.1100000000000003,1.96,4.15,0], [0,2,3.88,2,1.9,3.95,0], [0,2,4.220000000000001,1.85,1.8,3.55,1], [0,2,3.7600000000000002,1.9300000000000002,1.81,3.3,1], [0,2,3.9799999999999995,1.9100000000000001,1.84,4.65,0], [0,2,3.65,1.8,1.82,3.15,1], [0,2,4.08,1.8399999999999999,1.95,3.9,0], [0,2,3.6,1.85,1.86,3.1,1], [0,2,4.41,1.97,1.96,4.4,0], [0,2,3.7,1.69,1.85,3,1], [0,2,3.96,1.8800000000000001,1.9,4.6,0], [0,2,4.11,1.9,1.82,3.425,0], [0,2,3.6,1.7899999999999998,1.9,3.45,1], [0,2,4.2299999999999995,2.12,1.91,4.15,0], [0,1,3.96,1.77,1.86,3.5,1], [0,1,4.01,1.89,1.88,4.3,0], [0,1,3.5,1.7899999999999998,1.9,3.45,1], [0,1,4.2,1.95,2,4.05,0], [0,1,3.45,1.81,1.87,2.9,1], [0,1,4.14,1.86,1.91,3.7,0], [0,1,3.9,1.75,1.86,3.55,1], [0,1,4.0600000000000005,1.8800000000000001,1.93,3.8,0], [0,1,3.65,1.6600000000000001,1.81,2.85,1], [0,1,3.7600000000000002,1.9100000000000001,1.94,3.75,0], [0,1,3.5700000000000003,1.69,1.85,3.15,1], [0,1,4.13,2.1100000000000003,1.95,4.4,0], [0,1,3.7600000000000002,1.7,1.85,3.6,1], [0,1,4.11,1.8199999999999998,1.92,4.05,0], [0,1,3.6399999999999997,1.7100000000000002,1.84,2.85,1], [0,1,4.16,1.8,1.92,3.95,0], [0,1,3.55,1.6199999999999999,1.95,3.35,1], [0,1,4.11,1.9100000000000001,1.88,4.1,0], [0,0,3.59,1.6600000000000001,1.9,3.05,1], [0,0,4.18,1.94,1.98,4.45,0], [0,0,3.35,1.9,1.9,3.6,1], [0,0,3.97,1.8399999999999999,1.9,3.9,0], [0,0,3.96,1.72,1.96,3.55,1], [0,0,4.58,1.89,1.97,4.15,0], [0,0,3.55,1.75,1.9,3.7,1], [0,0,4.279999999999999,1.85,1.95,4.25,0], [0,0,4.09,1.6800000000000002,1.91,3.7,1], [0,0,3.72,1.94,1.84,3.9,0], [0,0,3.62,1.61,1.87,3.55,1], [0,0,4.21,1.9100000000000001,1.95,4,0], [0,0,3.46,1.72,1.89,3.2,1], [0,0,4.29,1.7600000000000002,1.96,4.7,0], [0,0,3.6700000000000004,1.8800000000000001,1.87,3.8,1], [0,0,3.5100000000000002,1.94,1.93,4.2,0], [0,2,3.7299999999999995,1.78,1.91,3.35,1], [0,2,4.13,2.0300000000000002,1.94,3.55,0], [0,2,3.63,1.95,1.9,3.8,0], [0,2,3.69,1.86,1.89,3.5,1], [0,2,3.8299999999999996,1.92,1.89,3.95,0], [0,2,3.8899999999999997,1.8800000000000001,1.9,3.6,1], [0,2,3.5700000000000003,1.8,2.02,3.55,1], [0,2,4.11,1.81,2.05,4.3,0], [0,2,3.4,1.7100000000000002,1.85,3.4,1], [0,2,3.96,1.81,1.86,4.45,0], [0,2,3.62,1.73,1.87,3.3,1], [0,2,4.08,1.89,2.08,4.3,0], [0,2,3.81,1.86,1.9,3.7,1], [0,2,4.029999999999999,1.85,1.96,4.35,0], [0,2,3.31,1.61,1.78,2.9,1], [0,2,4.32,1.85,1.92,4.1,0], [0,1,3.5,1.7899999999999998,1.92,3.725,1], 
[0,1,4.1,2,2.03,4.725,0], [0,1,3.7700000000000005,1.6,1.83,3.075,1], [0,1,3.78,2,1.9,4.25,0], [0,1,3.79,1.86,1.93,2.925,1], [0,1,3.97,1.89,1.84,3.55,0], [0,1,3.8600000000000003,1.72,1.99,3.75,1], [0,1,3.8200000000000003,2,1.9,3.9,0], [0,1,3.81,1.7,1.81,3.175,1], [0,1,4.32,1.9,1.97,4.775,0], [0,1,3.81,1.65,1.98,3.825,1], [0,1,4.5600000000000005,2.0300000000000002,1.91,4.6,0], [0,1,3.97,1.77,1.93,3.2,1], [0,1,4.220000000000001,1.95,1.97,4.275,0], [0,1,3.96,2.07,1.91,3.9,1], [0,1,4.2700000000000005,1.83,1.96,4.075,0], [0,0,3.8600000000000003,1.7,1.88,2.9,1], [0,0,3.7299999999999995,2.05,1.99,3.775,0], [0,0,3.5700000000000003,1.7,1.89,3.35,1], [0,0,4.11,1.86,1.89,3.325,0], [0,0,3.62,1.72,1.87,3.15,1], [0,0,3.7700000000000005,1.98,1.98,3.5,0], [0,0,4.0200000000000005,1.7,1.76,3.45,1], [0,0,4.14,1.85,2.02,3.875,0], [0,0,3.5200000000000005,1.59,1.86,3.05,1], [0,0,4.0600000000000005,1.9,1.99,4,0], [0,0,3.88,1.7600000000000002,1.91,3.275,1], [0,0,4.15,1.83,1.95,4.3,0], [0,0,3.9,1.7100000000000002,1.91,3.05,1], [0,0,4.41,1.8,2.1,4,0], [0,0,3.85,1.7899999999999998,1.9,3.325,1], [0,0,4.3100000000000005,1.92,1.97,3.5,0], [0,2,3.6799999999999997,1.85,1.93,3.5,1], [0,2,3.75,1.85,1.99,4.475,0], [0,2,3.81,1.7600000000000002,1.87,3.425,1], [0,2,4.11,1.75,1.9,3.9,0], [0,2,3.56,1.75,1.91,3.175,1], [0,2,4.0200000000000005,2.0100000000000002,2,3.975,0], [0,2,3.7,1.65,1.85,3.4,1], [0,2,3.97,1.7899999999999998,1.93,4.25,0], [0,2,4.0200000000000005,1.7100000000000002,1.93,3.4,1], [0,2,4.0600000000000005,1.72,1.87,3.475,0], [0,2,3.21,1.55,1.88,3.05,1], [0,2,4.07,1.7,1.9,3.725,0], [0,2,3.7299999999999995,1.6800000000000002,1.92,3,1], [0,2,3.9,1.8699999999999999,1.85,3.65,0], [0,2,3.9200000000000004,1.86,1.9,4.25,0], [0,2,3.66,1.8399999999999999,1.84,3.475,1], [0,2,3.6,1.78,1.95,3.45,1], [0,2,3.78,1.81,1.93,3.75,0], [0,2,3.6,1.7100000000000002,1.87,3.7,1], [0,2,4.15,1.85,2.01,4,0], [1,2,4.65,1.7899999999999998,1.92,3.5,1], [1,2,5,1.95,1.96,3.9,0], [1,2,5.13,1.92,1.93,3.65,0], [1,2,4.54,1.8699999999999999,1.88,3.525,1], [1,2,5.2700000000000005,1.98,1.97,3.725,0], [1,2,4.5200000000000005,1.78,1.98,3.95,1], [1,2,4.61,1.8199999999999998,1.78,3.25,1], [1,2,5.13,1.8199999999999998,1.97,3.75,0], [1,2,4.6,1.89,1.95,4.15,1], [1,2,5.13,1.9899999999999998,1.98,3.7,0], [1,2,4.66,1.78,1.93,3.8,1], [1,2,5.17,2.0300000000000002,1.94,3.775,0], [1,2,4.7,1.73,1.85,3.7,1], [1,2,5.2,1.81,2.01,4.05,0], [1,2,4.59,1.7100000000000002,1.9,3.575,1], [1,2,5.05,1.9600000000000002,2.01,4.05,0], [1,2,5.029999999999999,2,1.97,3.3,0], [1,2,5.8,1.78,1.81,3.7,1], [1,2,4.64,1.86,1.9,3.45,1], [1,2,4.92,1.8199999999999998,1.95,4.4,0], [1,2,4.24,1.73,1.81,3.6,1], [1,2,4.85,1.75,1.91,3.4,0], [1,2,4.32,1.6600000000000001,1.87,2.9,1], [1,2,5.0600000000000005,1.94,1.93,3.8,0], [1,2,4.67,1.7899999999999998,1.95,3.3,1], [1,2,5.2,1.9,1.97,4.15,0], [1,2,5.05,1.8399999999999999,2,3.4,1], [1,2,4.95,1.9,2,3.8,0], [1,2,4.64,1.78,1.91,3.7,1], [1,2,5.279999999999999,2,2.05,4.55,0], [1,2,4.09,1.6600000000000001,1.87,3.2,1], [1,2,5.42,2.08,2.01,4.3,0], [1,2,4.25,1.67,1.87,3.35,1], [1,2,5.1,1.8800000000000001,2.03,4.1,0], [1,2,4.970000000000001,1.86,1.95,3.6,0], [1,2,4.75,1.6800000000000002,1.99,3.9,1], [1,2,4.76,1.83,1.95,3.85,1], [1,2,5.2,2.07,2.1,4.8,0], [1,2,4.6899999999999995,1.6600000000000001,1.92,2.7,1], [1,2,5.35,1.9899999999999998,2.05,4.5,0], [1,2,4.9,1.95,2.1,3.95,0], [1,2,4.62,1.75,1.87,3.65,1], [1,2,5.09,1.9100000000000001,1.96,3.55,0], [1,2,4.55,1.7,1.96,3.5,1], [1,2,5.09,1.7899999999999998,1.96,3.675,1], [1,2,5.08,1.85,2.01,4.45,0], 
[1,2,5.01,1.7899999999999998,1.9,3.4,1], [1,2,4.9,1.9600000000000002,2.12,4.3,0], [1,2,5.15,1.8699999999999999,1.87,3.25,0], [1,2,4.9799999999999995,1.73,1.98,3.675,1], [1,2,4.8100000000000005,1.64,1.99,3.325,1], [1,2,5.14,1.9,2.01,3.95,0], [1,2,4.57,1.73,1.93,3.6,1], [1,2,5.07,1.97,2.03,4.05,0], [1,2,4.25,1.73,1.87,3.35,1], [1,2,5.220000000000001,1.8800000000000001,1.97,3.45,0], [1,2,4.5200000000000005,1.6600000000000001,1.91,3.25,1], [1,2,4.93,1.9899999999999998,2.03,4.05,0], [1,2,5.0200000000000005,1.8800000000000001,2.02,3.8,0], [1,2,4.5600000000000005,1.94,1.94,3.525,1], [1,2,5.1899999999999995,1.95,2.06,3.95,0], [1,2,4.68,1.65,1.89,3.65,1], [1,2,4.57,1.7,1.95,3.65,1], [1,2,5.58,1.98,2.07,4,0], [1,2,4.35,1.81,2.02,3.4,1], [1,2,4.96,1.8199999999999998,1.93,3.775,0], [1,2,5.08,1.9,2.1,4.1,0], [1,2,5.0200000000000005,1.8699999999999999,1.98,3.775,1], [2,1,4.61,1.3199999999999998,2.11,4.5,1], [2,1,5,1.6300000000000001,2.3,5.7,0], [2,1,4.87,1.41,2.1,4.45,1], [2,1,5,1.52,2.18,5.7,0], [2,1,4.76,1.45,2.15,5.4,0], [2,1,4.65,1.35,2.1,4.55,1], [2,1,4.54,1.46,2.11,4.8,1], [2,1,4.67,1.53,2.19,5.2,0], [2,1,4.33,1.34,2.09,4.4,1], [2,1,4.68,1.54,2.15,5.15,0], [2,1,4.09,1.3699999999999999,2.14,4.65,1], [2,1,4.9,1.61,2.16,5.55,0], [2,1,4.55,1.3699999999999999,2.14,4.65,1], [2,1,4.84,1.46,2.13,5.85,0], [2,1,4.58,1.46,2.1,4.2,1], [2,1,4.93,1.5699999999999998,2.17,5.85,0], [2,1,4.2,1.35,2.1,4.15,1], [2,1,4.92,1.52,2.21,6.3,0], [2,1,4.62,1.45,2.09,4.8,1], [2,1,4.87,1.51,2.22,5.35,0], [2,1,5.0200000000000005,1.4300000000000002,2.18,5.7,0], [2,1,4.51,1.45,2.15,5,1], [2,1,4.65,1.45,2.13,4.4,1], [2,1,4.63,1.58,2.15,5.05,0], [2,1,4.29,1.31,2.15,5,1], [2,1,4.61,1.51,2.15,5.1,0], [2,1,4.779999999999999,1.5,2.15,5.65,0], [2,1,4.82,1.4300000000000002,2.1,4.6,1], [2,1,5,1.53,2.2,5.55,0], [2,1,4.7299999999999995,1.53,2.22,5.25,0], [2,1,4.279999999999999,1.42,2.09,4.7,1], [2,1,4.51,1.45,2.07,5.05,1], [2,1,5.96,1.7,2.3,6.05,0], [2,1,4.91,1.48,2.2,5.15,1], [2,1,4.84,1.6300000000000001,2.2,5.4,0], [2,1,4.26,1.3699999999999999,2.13,4.95,1], [2,1,4.4399999999999995,1.73,2.19,5.25,0], [2,1,4.4,1.3599999999999999,2.08,4.35,1], [2,1,4.87,1.5699999999999998,2.08,5.35,0], [2,1,4.2700000000000005,1.3699999999999999,2.08,3.95,1], [2,1,4.96,1.6,2.25,5.7,0], [2,1,4.529999999999999,1.3699999999999999,2.1,4.3,1], [2,1,4.96,1.5,2.16,4.75,0], [2,1,5.05,1.59,2.22,5.55,0], [2,1,4.36,1.3900000000000001,2.17,4.9,1], [2,1,4.55,1.3900000000000001,2.1,4.2,1], [2,1,5.05,1.59,2.25,5.4,0], [2,1,4.49,1.33,2.13,5.1,1], [2,1,4.5200000000000005,1.58,2.15,5.3,0], [2,1,4.66,1.42,2.1,4.85,1], [2,1,4.85,1.41,2.2,5.3,0], [2,1,4.51,1.44,2.1,4.4,1], [2,1,5.01,1.5,2.25,5,0], [2,1,4.65,1.44,2.17,4.9,1], [2,1,4.5,1.54,2.2,5.05,0], [2,1,4.38,1.3900000000000001,2.08,4.3,1], [2,1,4.55,1.5,2.2,5,0], [2,1,4.32,1.45,2.08,4.45,1], [2,1,5.04,1.53,2.24,5.55,0], [2,1,4.529999999999999,1.3800000000000001,2.08,4.2,1], [2,1,4.62,1.49,2.21,5.3,0], [2,1,4.57,1.3900000000000001,2.14,4.4,1], [2,1,5.43,1.5699999999999998,2.31,5.65,0], [2,1,4.58,1.42,2.19,4.7,1], [2,1,4.9799999999999995,1.6800000000000002,2.3,5.7,0], [2,1,4.95,1.6199999999999999,2.29,5.8,0], [2,1,4.35,1.42,2.2,4.7,1], [2,1,5.07,1.5,2.23,5.55,0], [2,1,4.7700000000000005,1.5,2.16,4.75,1], [2,1,4.64,1.56,2.21,5,0], [2,1,4.82,1.56,2.21,5.1,0], [2,1,4.65,1.48,2.17,5.2,1], [2,1,4.64,1.5,2.16,4.7,1], [2,1,4.86,1.6,2.3,5.8,0], [2,1,4.75,1.42,2.09,4.6,1], [2,1,5.11,1.6300000000000001,2.2,6,0], [2,1,4.5200000000000005,1.3800000000000001,2.15,4.75,1], [2,1,4.5200000000000005,1.64,2.23,5.95,0], 
[2,1,4.91,1.45,2.12,4.625,1], [2,1,5.25,1.56,2.21,5.45,0], [2,1,4.74,1.46,2.12,4.725,1], [2,1,5,1.59,2.24,5.35,0], [2,1,4.49,1.3800000000000001,2.12,4.75,1], [2,1,5.08,1.73,2.28,5.6,0], [2,1,4.34,1.44,2.18,4.6,1], [2,1,5.13,1.42,2.18,5.3,0], [2,1,4.75,1.4,2.12,4.875,1], [2,1,5.21,1.7,2.3,5.55,0], [2,1,4.75,1.5,2.18,4.95,1], [2,1,5.220000000000001,1.7100000000000002,2.28,5.4,0], [2,1,4.55,1.45,2.12,4.75,1], [2,1,4.95,1.61,2.24,5.65,0], [2,1,4.45,1.47,2.14,4.85,1], [2,1,5.08,1.5699999999999998,2.26,5.2,0], [2,1,4.9399999999999995,1.58,2.16,4.925,0], [2,1,4.6899999999999995,1.46,2.22,4.875,1], [2,1,4.84,1.44,2.03,4.625,1], [2,1,5.11,1.65,2.25,5.25,0], [2,1,4.85,1.5,2.19,4.85,1], [2,1,5.59,1.7,2.28,5.6,0], [2,1,4.720000000000001,1.55,2.15,4.975,1], [2,1,4.91,1.5,2.28,5.5,0], [2,1,4.68,1.61,2.15,5.5,0], [2,1,4.17,1.47,2.1,4.7,1], [2,1,5.34,1.58,2.19,5.5,0], [2,1,4.33,1.4,2.08,4.575,1], [2,1,4.8100000000000005,1.51,2.09,5.5,0], [2,1,5.05,1.52,2.16,5,1], [2,1,4.9799999999999995,1.59,2.29,5.95,0], [2,1,4.35,1.52,2.13,4.65,1], [2,1,5.15,1.6300000000000001,2.3,5.5,0], [2,1,4.62,1.41,2.17,4.375,1], [2,1,5.51,1.6,2.3,5.85,0], [2,1,4.88,1.6199999999999999,2.22,6,0], [2,1,4.720000000000001,1.3699999999999999,2.14,4.925,1], [2,1,4.68,1.4300000000000002,2.15,4.85,1], [2,1,5.04,1.5699999999999998,2.22,5.75,0], [2,1,4.5200000000000005,1.48,2.12,5.2,1], [2,1,4.99,1.61,2.13,5.4,0] ];
1. Data labeling
Although the data above has been through a first pass of processing, it is still not ready for training. To turn it into tensors, each row has to be split into feature values and a label, and to check how well the model generalizes the dataset also has to be split into a training set and a validation set. The two functions below handle the splitting and the tensor conversion.
import * as tf from '@tensorflow/tfjs';

// Adelie, Chinstrap, Gentoo
export const IRIS_CLASSES = ['Adelie', 'Chinstrap', 'Gentoo'];
export const IRIS_NUM_CLASSES = IRIS_CLASSES.length;
// Sex
const SEX = ['MALE', 'FEMALE'];
// Island
const LAND = ['Torgersen', 'Biscoe', 'Dream'];

function convertToTensors(data, targets, testSplit) {
  const numExamples = data.length;
  if (numExamples !== targets.length) {
    throw new Error('data and split have different numbers of examples');
  }

  // Shuffle the examples before splitting.
  const indices = [];
  for (let i = 0; i < numExamples; ++i) {
    indices.push(i);
  }
  tf.util.shuffle(indices);

  const shuffledData = [];
  const shuffledTargets = [];
  for (let i = 0; i < numExamples; ++i) {
    shuffledData.push(data[indices[i]]);
    shuffledTargets.push(targets[indices[i]]);
  }

  // Split into a training part and a test (validation) part.
  const numTestExamples = Math.round(numExamples * testSplit);
  const numTrainExamples = numExamples - numTestExamples;
  const xDims = shuffledData[0].length;

  // Features as a 2D tensor, labels one-hot encoded.
  const xs = tf.tensor2d(shuffledData, [numExamples, xDims]);
  const ys = tf.oneHot(tf.tensor1d(shuffledTargets).toInt(), IRIS_NUM_CLASSES);

  const xTrain = xs.slice([0, 0], [numTrainExamples, xDims]);
  const xTest = xs.slice([numTrainExamples, 0], [numTestExamples, xDims]);
  const yTrain = ys.slice([0, 0], [numTrainExamples, IRIS_NUM_CLASSES]);
  const yTest = ys.slice([numTrainExamples, 0], [numTestExamples, IRIS_NUM_CLASSES]);
  return [xTrain, yTrain, xTest, yTest];
}

export function getIrisData(testSplit) {
  return tf.tidy(() => {
    // Group the rows by species so the split keeps the class balance.
    const dataByClass = [];
    const targetsByClass = [];
    for (let i = 0; i < IRIS_CLASSES.length; ++i) {
      dataByClass.push([]);
      targetsByClass.push([]);
    }
    for (const example of IRIS_DATA) {
      const target = example[0];
      const data = example.slice(1, example.length);
      dataByClass[target].push(data);
      targetsByClass[target].push(target);
    }

    // Convert each class separately, then concatenate.
    const xTrains = [];
    const yTrains = [];
    const xTests = [];
    const yTests = [];
    for (let i = 0; i < IRIS_CLASSES.length; ++i) {
      const [xTrain, yTrain, xTest, yTest] =
          convertToTensors(dataByClass[i], targetsByClass[i], testSplit);
      xTrains.push(xTrain);
      yTrains.push(yTrain);
      xTests.push(xTest);
      yTests.push(yTest);
    }

    const concatAxis = 0;
    return [
      tf.concat(xTrains, concatAxis),
      tf.concat(yTrains, concatAxis),
      tf.concat(xTests, concatAxis),
      tf.concat(yTests, concatAxis)
    ];
  });
}
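As a quick sanity check (my own addition, not part of the original article), you can call the split function once and log the tensor shapes; with a 0.15 test split the shapes should look roughly like this:

import { getIrisData } from './data';

const [xTrain, yTrain, xTest, yTest] = getIrisData(0.15);
// Roughly 85% of the rows go to training; each row has 6 features
// and each label is a one-hot vector over the 3 species.
console.log(xTrain.shape); // [numTrainExamples, 6]
console.log(yTrain.shape); // [numTrainExamples, 3]
console.log(xTest.shape);  // [numTestExamples, 6]
console.log(yTest.shape);  // [numTestExamples, 3]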
2. Page layout
The page is plain HTML: the user selects or fills in the required parameters and the model predicts the penguin species, and the whole training process is also visualized in the page UI. The code is as follows.
<script class="lazyload" src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsQAAA7EAZUrDhsAAAANSURBVBhXYzh8+PB/AAffA0nNPuCLAAAAAElFTkSuQmCC" data-original="script.js"></script><form action="#" onsubmit="predict(this); return false;"> <!-- 岛屿名:<input type="text" name="a"><br> --> 岛屿名:<select name="a"> <option value="0">南极帕尔默</option> <option value="1">南极比斯科</option> <option value="2">南极梦幻</option> </select> <br> 嘴峰长度(mm):<input type="text" name="b"><br> 嘴峰深度(mm):<input type="text" name="c"><br> 脚掌长度(mm):<input type="text" name="d"><br> 体重(g):<input type="text" name="e"><br> <!-- 性别:<input type="text" name="f"><br> --> 性别:<select name="f"> <option value="0">雄性</option> <option value="1">雌性</option> </select> <br> <button type="submit">预测</button></form>
3. Model training
Model training works much like the earlier linear-regression example: create the model, add the hidden layer and the output layer, configure the number of units and the activation functions, then compile and train the model.
import * as tf from '@tensorflow/tfjs';
import * as tfvis from '@tensorflow/tfjs-vis';
import { getIrisData, IRIS_CLASSES } from './data';

window.onload = async () => {
  const [xTrain, yTrain, xTest, yTest] = getIrisData(0.15);

  const model = tf.sequential();
  model.add(tf.layers.dense({
    units: 10,
    inputShape: [xTrain.shape[1]],
    activation: 'sigmoid'
  }));
  model.add(tf.layers.dense({
    units: 3,
    activation: 'softmax'
  }));

  model.compile({
    loss: 'categoricalCrossentropy',
    optimizer: tf.train.adam(0.1),
    metrics: ['accuracy']
  });

  await model.fit(xTrain, yTrain, {
    epochs: 100,
    validationData: [xTest, yTest],
    callbacks: tfvis.show.fitCallbacks(
      { name: 'Training performance' },
      ['loss', 'val_loss', 'acc', 'val_acc'],
      { callbacks: ['onEpochEnd'] }
    )
  });

  window.predict = (form) => {
    // Scale the raw inputs the same way as the training data:
    // culmen length/depth / 10, flipper length / 100, body mass / 1000.
    const input = tf.tensor([[
      form.a.value * 1,
      form.b.value * 1 / 10,
      form.c.value * 1 / 10,
      form.d.value * 1 / 100,
      form.e.value * 1 / 1000,
      form.f.value * 1
    ]]);
    const pred = model.predict(input);
    alert(`Prediction: ${IRIS_CLASSES[pred.argMax(1).dataSync()[0]]}`);
    return false;
  };
};
One thing to watch out for: if something goes wrong while training the model, clicking "Predict" makes the form navigate away, because window.predict is only defined after training finishes. Also, if the values in the training set are too large, the training loss is huge and very hard to bring down.
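One way to avoid the form navigating away (a sketch of my own, not code from the article) is to register a stub window.predict before training starts, so the onsubmit handler always has something to call and always returns false:

// Register a placeholder before training begins; predict(this) in the form's
// onsubmit then never falls through to a normal form submission.
window.predict = () => {
  alert('The model is still training, please try again in a moment.');
  return false;
};

// Once model.fit() has resolved, overwrite window.predict with the real
// implementation shown above, which scales the inputs and calls model.predict().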