Spark SQL(八)之基于物品的相似度公式
一、基于物品的Jaccard相似度公式

w(i,j) = |N(i) ∩ N(j)| / |N(i) ∪ N(j)| = |N(i) ∩ N(j)| / (|N(i)| + |N(j)| - |N(i) ∩ N(j)|)

其中,i、j表示任意两个物品,N(i)表示喜欢物品i的用户集合(|N(i)|为喜欢物品i的用户数),N(j)表示喜欢物品j的用户集合,分子为同时喜欢i和j的用户数,分母为喜欢i或j的用户总数。
代码:
public class ItemCFApp {

    /**
     * Computes item-item Jaccard similarity from a MySQL {@code user_item} table:
     * sim(i, j) = |N(i) ∩ N(j)| / (|N(i)| + |N(j)| - |N(i) ∩ N(j)|),
     * where N(x) is the set of users who interacted with item x.
     */
    public static void main(String[] args) {
        SparkConf sparkConf = new SparkConf();
        sparkConf.setAppName("ItemCFApp");
        sparkConf.setMaster("local[*]");
        SparkSession sparkSession = SparkSession.builder().config(sparkConf).getOrCreate();
        try {
            String url = "jdbc:mysql://localhost:3306/spark-mysql?useUnicode=true&characterEncoding=utf8&autoReconnect=true&failOverReadOnly=false";
            String driver = "com.mysql.jdbc.Driver";
            String user = "root";
            // NOTE(review): credentials are hard-coded for the demo; load from config/env in production.
            String password = "admin";
            // Load (user_id, item_id) interaction pairs.
            Dataset<Row> dataset = sparkSession.read()
                    .format("jdbc")
                    .option("driver", driver)
                    .option("url", url)
                    .option("dbtable", "user_item")
                    .option("user", user)
                    .option("password", password)
                    .load();
            // |N(i)|: number of users per item.
            Dataset<Row> itemCount = dataset.groupBy("item_id").count();
            // |N(i) ∩ N(j)|: self-join on user_id, then count users shared by each ordered item pair.
            // Column.equalTo(...) is the Java-facing API (avoids the Scala-mangled $eq$eq$eq name).
            Dataset<Row> item2ItemCount = dataset.as("a").join(dataset.as("b"),
                            functions.col("a.user_id").equalTo(functions.col("b.user_id")))
                    .where(functions.col("a.item_id").notEqual(functions.col("b.item_id")))
                    .select(functions.col("a.item_id").as("a_item_id"),
                            functions.col("b.item_id").as("b_item_id"))
                    .groupBy("a_item_id", "b_item_id").count();
            // Jaccard: intersection / union, where union = |N(i)| + |N(j)| - intersection.
            Dataset<Row> result = item2ItemCount.as("i2i")
                    .join(itemCount.as("ic1"),
                            functions.col("i2i.a_item_id").equalTo(functions.col("ic1.item_id")))
                    .join(itemCount.as("ic2"),
                            functions.col("i2i.b_item_id").equalTo(functions.col("ic2.item_id")))
                    .selectExpr("i2i.a_item_id", "i2i.b_item_id",
                            "i2i.count/(ic1.count + ic2.count - i2i.count) as count");
            result.show();
            // Uncomment to persist the similarity table back to MySQL:
            // result.write()
            //         .mode(SaveMode.Overwrite)
            //         .format("jdbc")
            //         .option("driver", driver)
            //         .option("url", url)
            //         .option("dbtable", "item_similar")
            //         .option("user", user)
            //         .option("password", password)
            //         .save();
        } finally {
            // Release the session even when the job fails mid-way.
            sparkSession.stop();
        }
    }
}
二、基于物品的余弦相似度公式

w(i,j) = |N(i) ∩ N(j)| / sqrt(|N(i)| × |N(j)|)

其中,i、j表示任意两个物品,N(i)表示喜欢物品i的用户集合(|N(i)|为喜欢物品i的用户数),N(j)表示喜欢物品j的用户集合,分子为同时喜欢i和j的用户数,分母为两个物品用户数乘积的平方根。
代码:
public class ItemCF2App {

    /**
     * Computes item-item cosine similarity from a MySQL {@code user_item} table:
     * sim(i, j) = |N(i) ∩ N(j)| / sqrt(|N(i)| * |N(j)|),
     * where N(x) is the set of users who interacted with item x.
     */
    public static void main(String[] args) {
        SparkConf sparkConf = new SparkConf();
        // Fix: was copy-pasted as "ItemCFApp", mislabeling this job in the Spark UI/logs.
        sparkConf.setAppName("ItemCF2App");
        sparkConf.setMaster("local[*]");
        SparkSession sparkSession = SparkSession.builder().config(sparkConf).getOrCreate();
        try {
            String url = "jdbc:mysql://localhost:3306/spark-mysql?useUnicode=true&characterEncoding=utf8&autoReconnect=true&failOverReadOnly=false";
            String driver = "com.mysql.jdbc.Driver";
            String user = "root";
            // NOTE(review): credentials are hard-coded for the demo; load from config/env in production.
            String password = "admin";
            // Load (user_id, item_id) interaction pairs.
            Dataset<Row> dataset = sparkSession.read()
                    .format("jdbc")
                    .option("driver", driver)
                    .option("url", url)
                    .option("dbtable", "user_item")
                    .option("user", user)
                    .option("password", password)
                    .load();
            // |N(i)|: number of users per item.
            Dataset<Row> itemCount = dataset.groupBy("item_id").count();
            // |N(i) ∩ N(j)|: self-join on user_id, then count users shared by each ordered item pair.
            // Column.equalTo(...) is the Java-facing API (avoids the Scala-mangled $eq$eq$eq name).
            Dataset<Row> item2ItemCount = dataset.as("a").join(dataset.as("b"),
                            functions.col("a.user_id").equalTo(functions.col("b.user_id")))
                    .where(functions.col("a.item_id").notEqual(functions.col("b.item_id")))
                    .select(functions.col("a.item_id").as("a_item_id"),
                            functions.col("b.item_id").as("b_item_id"))
                    .groupBy("a_item_id", "b_item_id").count();
            // Cosine: intersection / sqrt(|N(i)| * |N(j)|); sqrt(x) is equivalent to pow(x, 0.5)
            // but states the intent directly.
            Dataset<Row> result = item2ItemCount.as("i2i")
                    .join(itemCount.as("ic1"),
                            functions.col("i2i.a_item_id").equalTo(functions.col("ic1.item_id")))
                    .join(itemCount.as("ic2"),
                            functions.col("i2i.b_item_id").equalTo(functions.col("ic2.item_id")))
                    .selectExpr("i2i.a_item_id", "i2i.b_item_id",
                            "i2i.count/sqrt(ic1.count * ic2.count) as count");
            result.show();
            // Uncomment to persist the similarity table back to MySQL:
            // result.write()
            //         .mode(SaveMode.Overwrite)
            //         .format("jdbc")
            //         .option("driver", driver)
            //         .option("url", url)
            //         .option("dbtable", "item_similar")
            //         .option("user", user)
            //         .option("password", password)
            //         .save();
        } finally {
            // Release the session even when the job fails mid-way.
            sparkSession.stop();
        }
    }
}

浙公网安备 33010602011771号