2019-08-22工作进展

昨天工作:

  1. inference:在相同规则采样的验证集的效果:在正负样本1:4的验证集上:
    acc:0.86 auc:0.84 precision:0.60;在正负样本1:1数据集上:acc:0.84 auc:0.84 precision:0.87;模型表现比较正常;但是在外包标注的数据上的inference表现非常差,大多数样本都被网络认为是负样本;
  2. 实验分析:考虑到网络在两个验证集上的效果差别比较大,在使用相同规则采样得到的验证集中的效果尚可的,说明网络有着比较不错的学习效果,但是在外包标注的数据上的效果不好,可能是训练集构造不合理;

今天计划:
通过词替换的方式重新构造训练集

  1. 对比了各种分词方式,使用 segment_mainse 的方式是最合适的(以「老爹鞋」「T恤女」等 query 的切分效果为例)

  2. 重新构造训练集

品牌词:search_kg:alinlp_brand_recognition()
常用表:
graph_embedding.jl_jingyan_query_related_top_query
graph_embedding.jl_jingyan_query_related_top_query_detailed
ds=20190819

关键词替换:
graph_embedding.jl_jingyan_query_related_top_query

-- Working snapshot: copy the 2019-08-19 partition of the top-query source table.
create table hs_tmp_274 as select * from graph_embedding.jl_jingyan_query_related_top_query where ds = 20190819;

-- Tokenize each query with the alinlp MAINSE segmenter; tokens are joined with " "
-- (the separator argument). NOTE(review): INSERT OVERWRITE, so hs_tmp_275 must already exist.
insert overwrite table hs_tmp_275 select query, search_kg:alinlp_segment(query, "MAINSE", "0", "1", " ", "true", ":", "|") as query_seg from hs_tmp_274;

-- Run brand recognition over each raw query.
-- NOTE(review): INSERT OVERWRITE, so hs_tmp_276 must already exist.
insert overwrite table hs_tmp_276 select query, search_kg:alinlp_brand_recognition(query) as brand from hs_tmp_274;

-- Spot-check: keep only rows whose query contains "包包".
-- NOTE(review): hs_tmp_280 is only created further down this log, so this statement
-- cannot run at this position — the order reflects the work log, not an executable script.
insert overwrite table hs_tmp_277 select * from hs_tmp_280 where query like '%包包%';

-- Extract the brand term per query via the hs_return_brand UDTF, which emits
-- (query, brand). NOTE(review): the UDTF is defined elsewhere — not visible here.
create table hs_tmp_278 as select hs_return_brand(query, query_seg) as (query, brand) from hs_tmp_275;

-- Brand frequency table: how many queries carry each recognized brand.
-- NOTE(review): a global ORDER BY inside a CTAS without LIMIT is pointless for storage
-- and is rejected by some engines (ODPS normally requires LIMIT with ORDER BY) — confirm it ran.
create table hs_tmp_279 as select brand, count(*) as freq from hs_tmp_278 group by brand order by freq;

-- Indexed (query, brand, brand-frequency) sample: join brand assignments back to a
-- 10k sample of the raw queries, then attach each brand's global frequency.
-- NOTE(review): row_number() over () with no ORDER BY gives an arbitrary,
-- non-deterministic numbering, and `limit 10000` without ORDER BY is an arbitrary sample.
create table hs_tmp_280 as
select row_number()over() as index, c.query, c.brand, d.freq from
(select a.query, a.brand from hs_tmp_278 a join (select * from hs_tmp_274 limit 10000)b on a.query == b.query)c join hs_tmp_279 d on c.brand == d.brand;

低频 brand(出现次数太少)的处理:

发现将阈值设置在大于9是合适的,剩下7625条query可用,其余的2k多条数据还是使用之前的方式进行负样本采样

-- Assign each sampled row a per-brand serial number (group_freq = 1..n within a brand).
-- NOTE(review): freq is constant within a brand, so the ORDER BY ties everywhere and
-- the serial order inside a brand is arbitrary — fine for random pairing, not for ranking.
create table hs_tmp_281 as
select *,row_number() over (partition by brand order by cast(freq as bigint) desc) as group_freq from hs_tmp_280;

-- Same per-brand serial numbering, over the full brand-extraction table.
-- NOTE(review): hs_tmp_278 as created above has only (query, brand) — no `freq`
-- column — so this statement should fail as written; verify the actual schema.
create table hs_tmp_282 as
select *,row_number() over (partition by brand order by cast(freq as bigint) desc) as group_freq from hs_tmp_278;

query 的 index 从这儿来
-- Master sample: same shape as hs_tmp_280 but over ALL queries (no 10k limit).
-- This `index` is the canonical query id that the later tables join on.
-- NOTE(review): row_number() over () without ORDER BY is non-deterministic,
-- so the index assignment is only stable within this one materialization.
create table hs_tmp_283 as
select row_number()over() as index, c.query, c.brand, d.freq from
(select a.query, a.brand from hs_tmp_278 a join (select * from hs_tmp_274)b on a.query == b.query)c join hs_tmp_279 d on c.brand == d.brand;

-- Per-brand serial number over the full master sample (arbitrary order within a
-- brand, since freq is constant per brand — see note on hs_tmp_281).
create table hs_tmp_284 as
select *,row_number() over (partition by brand order by cast(freq as bigint) desc) as group_freq from hs_tmp_283;

-- For each row draw a uniform random rank `choose` in [1, freq]; used next to pick
-- a random same-brand query by matching the draw against another row's serial number.
create table hs_tmp_285 as
select *, cast(rand() * freq + 1 as bigint) as choose from hs_tmp_284;

-- Attach a randomly chosen same-brand query (query1) to each row: row a is picked
-- when its per-brand serial number equals row b's random draw.
-- NOTE(review): the original referenced b.choose1 and b.choose3, but hs_tmp_285 (above)
-- defines only a column named `choose` — presumably the author intended several
-- independent random draws. With a single draw available, both statements would be
-- identical, and the duplicate INSERT added only rows that the DISTINCT building
-- hs_tmp_287 removes again, so it is dropped here.
create table hs_tmp_286 as
select a.*, b.query as query1 from hs_tmp_285 a join hs_tmp_285 b on a.brand == b.brand and a.group_freq == b.choose;

-- Deduplicate the (query, replacement-query) pairs.
-- NOTE(review): the alias `choose_freq` actually holds the replacement query TEXT
-- (query1), not a frequency — misleading name, kept for downstream compatibility.
create table hs_tmp_287 as
select distinct index, query, brand, query1 as choose_freq from hs_tmp_286;

-- Serial-number the replacement pairs within each query id (order within an index
-- partition is arbitrary: `query` is constant per index).
create table hs_tmp_288 as
select *,row_number() over (partition by index order by query desc) as group_freq from hs_tmp_287;

-- Explode the comma-separated item_list into (se_keyword, item_id) pairs for page 1
-- of the 2019-08-19 partition, deduplicating before and after the split.
-- (A stray interactive "yes" console-confirmation line between these two statements
-- in the original transcript was removed so the script parses.)
drop table if exists graph_embedding.hs_tmp_289;

create table if not exists graph_embedding.hs_tmp_289
as select se_keyword, item_id from (
select distinct se_keyword, item_id from (
select bi_udf:bi_split_value(se_keyword, item_list, ",") as (se_keyword, item_id)
from (
select distinct se_keyword, item_list
from graph_embedding.jl_jingyan_query_related_top_query_detailed
where ds=20190819 and page_seq=1
)a
)b
)c;

-- Expand each indexed query to its associated item ids via the keyword→item table.
-- NOTE(review): global ORDER BY in a CTAS without LIMIT — same caveat as hs_tmp_279.
create table hs_tmp_290 as select b.index, b.query, b.brand, a.item_id from
hs_tmp_289 a join hs_tmp_283 b on a.se_keyword == b.query order by index;

-- Item count per query id (diagnostic). Same ORDER BY-in-CTAS caveat as above.
create table hs_tmp_291 as select index, count(*) as freq from hs_tmp_290 group by index order by freq desc;

-- Serial-number the items within each query id (arbitrary order: `query` is
-- constant inside an index partition), to allow capping items per query below.
create table hs_tmp_292 as
select *,row_number() over (partition by index order by query desc) as group_freq from hs_tmp_290;

-- Cap at fewer than 8000 items per query (self-overwrite: reads and rewrites the same table).
insert overwrite table hs_tmp_292 select * from hs_tmp_292 where group_freq < 8000;

-- Resolve each replacement query's own id: join the replacement text (choose_freq,
-- which holds query1) back to the master sample to get index_neg.
create table hs_tmp_293 as select b.index, b.query as query_pos, b.brand, b.choose_freq as query_neg, a.index as index_neg from hs_tmp_283 a join hs_tmp_287 b on a.query == b.choose_freq;

-- Positive examples (label=1): each query paired with its own items, restricted to
-- the first 10000 query ids.
create table hs_tmp_294 as select index, query, brand, item_id, 1 as label from hs_tmp_292 where index < 10001;

-- Negative candidates: pair each (query, replacement) with the ITEMS of the
-- replacement query (joined through index_neg).
create table hs_tmp_295 as select a.index, a.query_pos, a.query_neg, a.index_neg, b.item_id from hs_tmp_293 a join hs_tmp_292 b on a.index_neg = b.index;

-- Keep only negatives whose positive-side query id is in the first 10000 (self-overwrite).
insert overwrite table hs_tmp_295 select * from hs_tmp_295 where index < 10001;

296可以使用这种方式进行负样本采样,297需要随机采样或者使用之前的采样方式
-- Split queries by brand frequency (threshold chosen above): freq > 9 → brand-swap
-- negative sampling is viable (hs_tmp_296, ~7625 queries); freq < 10 → fall back to
-- the previous sampling scheme (hs_tmp_297). The two sets are complementary.
create table hs_tmp_296 as select distinct query from hs_tmp_280 where freq > 9;
create table hs_tmp_297 as select distinct query from hs_tmp_280 where freq < 10;

-- Restrict negative candidates to queries with a high-frequency brand.
-- NOTE(review): `select *` over the join carries b.query as a second, duplicate-named
-- column (visible in the schema note below: ... | item_id | query |).
create table hs_tmp_298 as select * from hs_tmp_295 a join hs_tmp_296 b on a.query_pos == b.query;

-- Restrict the positives to the same high-frequency-brand query set.
create table hs_tmp_299 as select a.* from hs_tmp_294 a join hs_tmp_296 b on a.query == b.query;

-- Labelled set, schema (index, item_id, label): positives as label=1, then the
-- brand-swap items whose negative index equals the positive's own index as label=2.
-- NOTE(review): the label=2 rows are discarded later when hs_tmp_300 is overwritten
-- with `where label = 1` before the final negatives are appended — presumably they
-- mark self-collisions to exclude rather than usable negatives.
create table hs_tmp_300 as select index, query, brand, item_id, 1 as label from hs_tmp_299;
insert into table hs_tmp_300 select index, item_id, 2 as label from hs_tmp_298 where index == index_neg;

hs_tmp_298 : | index | query_pos | query_neg | index_neg | item_id | query |
hs_tmp_300: | index | item_id | label |

求差集
-- Set difference via LEFT JOIN anti-join: candidate negatives from hs_tmp_298 that
-- do NOT already appear in hs_tmp_300 (unmatched rows have NULL join keys).
create table hs_tmp_301 as select c.* from
(select a.*, b.index as indexb, b.item_id as item_idb from hs_tmp_298 a left join hs_tmp_300 b on a.index == b.index and a.item_id == b.item_id)c where c.indexb is NULL and c.item_idb is NULL;

-- Final negative pool with label 0, projected to the (index, item_id, label) schema.
create table hs_tmp_302 as select index, item_id, 0 as label from hs_tmp_301;

-- Shuffle the negative pool. A single `distribute by random()` pass already routes
-- every row to a random reducer; the original repeated this full-table rewrite
-- 7 times, which adds cost without adding randomness, so one pass is kept.
insert overwrite table hs_tmp_302 select * from hs_tmp_302 distribute by random();

-- Rebuild hs_tmp_300: keep only the true positives (label=1, dropping the label=2
-- marker rows), append a capped draw from the shuffled negative pool, then shuffle
-- the combined table. The original repeated the final shuffle 5 times; a single
-- `distribute by random()` pass is sufficient (see note on the hs_tmp_302 shuffle).
insert overwrite table hs_tmp_300 select * from hs_tmp_300 where label = 1;
insert into table hs_tmp_300 select * from hs_tmp_302 limit 53504615;

insert overwrite table hs_tmp_300 select * from hs_tmp_300 distribute by random();

构造新的query,item数据集

-- Item side of the dataset: title for every referenced item, taken from the latest
-- partition (MAX_PT, an ODPS helper) of the item dimension table, online items only.
create table hs_tmp_303 as select a.item_id, b.title from
(select distinct item_id from hs_tmp_300) a JOIN
(
select item_id, title from tbcdm.dim_tb_itm
where ds=MAX_PT('tbcdm.dim_tb_itm') and is_online="Y"
)b on a.item_id=b.item_id;

-- Query side of the dataset: the query text for every index in the labelled set.
create table hs_tmp_304 as select a.index, b.query from
(select distinct index from hs_tmp_300)a join hs_tmp_283 b on a.index == b.index;

-- Final training view: attach the query text and the item title to each
-- (index, item_id, label) row of the labelled set.
-- NOTE(review): the original transcript lost its asterisks to markdown formatting
-- ("select c., d.title" / "select a."); reconstructed as c.* / a.* from context.
create table hs_tmp_305 as select c.*, d.title from
(select a.*, b.query from hs_tmp_300 a join hs_tmp_304 b on a.index == b.index)c join hs_tmp_303 d on c.item_id == d.item_id;

©著作权归作者所有,转载或内容合作请联系作者
  • 序言:七十年代末,一起剥皮案震惊了整个滨河市,随后出现的几起案子,更是在滨河造成了极大的恐慌,老刑警刘岩,带你破解...
    沈念sama阅读 206,482评论 6 481
  • 序言:滨河连续发生了三起死亡事件,死亡现场离奇诡异,居然都是意外死亡,警方通过查阅死者的电脑和手机,发现死者居然都...
    沈念sama阅读 88,377评论 2 382
  • 文/潘晓璐 我一进店门,熙熙楼的掌柜王于贵愁眉苦脸地迎上来,“玉大人,你说我怎么就摊上这事。” “怎么了?”我有些...
    开封第一讲书人阅读 152,762评论 0 342
  • 文/不坏的土叔 我叫张陵,是天一观的道长。 经常有香客问我,道长,这世上最难降的妖魔是什么? 我笑而不...
    开封第一讲书人阅读 55,273评论 1 279
  • 正文 为了忘掉前任,我火速办了婚礼,结果婚礼上,老公的妹妹穿的比我还像新娘。我一直安慰自己,他们只是感情好,可当我...
    茶点故事阅读 64,289评论 5 373
  • 文/花漫 我一把揭开白布。 她就那样静静地躺着,像睡着了一般。 火红的嫁衣衬着肌肤如雪。 梳的纹丝不乱的头发上,一...
    开封第一讲书人阅读 49,046评论 1 285
  • 那天,我揣着相机与录音,去河边找鬼。 笑死,一个胖子当着我的面吹牛,可吹牛的内容都是我干的。 我是一名探鬼主播,决...
    沈念sama阅读 38,351评论 3 400
  • 文/苍兰香墨 我猛地睁开眼,长吁一口气:“原来是场噩梦啊……” “哼!你这毒妇竟也来了?” 一声冷哼从身侧响起,我...
    开封第一讲书人阅读 36,988评论 0 259
  • 序言:老挝万荣一对情侣失踪,失踪者是张志新(化名)和其女友刘颖,没想到半个月后,有当地人在树林里发现了一具尸体,经...
    沈念sama阅读 43,476评论 1 300
  • 正文 独居荒郊野岭守林人离奇死亡,尸身上长有42处带血的脓包…… 初始之章·张勋 以下内容为张勋视角 年9月15日...
    茶点故事阅读 35,948评论 2 324
  • 正文 我和宋清朗相恋三年,在试婚纱的时候发现自己被绿了。 大学时的朋友给我发了我未婚夫和他白月光在一起吃饭的照片。...
    茶点故事阅读 38,064评论 1 333
  • 序言:一个原本活蹦乱跳的男人离奇死亡,死状恐怖,灵堂内的尸体忽然破棺而出,到底是诈尸还是另有隐情,我是刑警宁泽,带...
    沈念sama阅读 33,712评论 4 323
  • 正文 年R本政府宣布,位于F岛的核电站,受9级特大地震影响,放射性物质发生泄漏。R本人自食恶果不足惜,却给世界环境...
    茶点故事阅读 39,261评论 3 307
  • 文/蒙蒙 一、第九天 我趴在偏房一处隐蔽的房顶上张望。 院中可真热闹,春花似锦、人声如沸。这庄子的主人今日做“春日...
    开封第一讲书人阅读 30,264评论 0 19
  • 文/苍兰香墨 我抬头看了看天上的太阳。三九已至,却和暖如春,着一层夹袄步出监牢的瞬间,已是汗流浃背。 一阵脚步声响...
    开封第一讲书人阅读 31,486评论 1 262
  • 我被黑心中介骗来泰国打工, 没想到刚下飞机就差点儿被人妖公主榨干…… 1. 我叫王不留,地道东北人。 一个月前我还...
    沈念sama阅读 45,511评论 2 354
  • 正文 我出身青楼,却偏偏与公主长得像,于是被迫代替她去往敌国和亲。 传闻我的和亲对象是个残疾皇子,可洞房花烛夜当晚...
    茶点故事阅读 42,802评论 2 345

推荐阅读更多精彩内容