下面的代码中，prepareData 函数为每批 100 个测点生成全部记录；writeData 函数把生成的数据批量写入分布式表；loopWrite 函数先用 cut 函数把 100 万个测点按每 100 个一组分割，再用 loop 依次对每组执行写入。
// Generates one batch of sample records for the given tag IDs.
//   idVec    : vector of tag IDs in this batch
//   startTime: timestamp of the first sample of every tag
//   n        : number of samples per tag, spaced 5 seconds apart
// Returns an in-memory table with columns (id, time, value, quality).
def prepareData(idVec, startTime, n){
    idSize = size(idVec)
    records = n * idSize
    // stretch repeats each element of idVec consecutively (records/idSize = n
    // times each), replacing the original element-wise assignment loop
    // `for(i in 0:idSize) id[(i*n):((i+1)*n)] = idVec[i]` with the built-in.
    id = stretch(idVec, records)
    return table(id, take(startTime+(0..(n-1))*5, records) as time, rand(100.0, records) as value, take(2.0, records) as quality)
}

// Generates one batch and appends it to the distributed table dfs://VALUE3/pt.
def writeData(idVec, startTime, n){
    pt = loadTable("dfs://VALUE3", "pt")
    pt.append!(prepareData(idVec, startTime, n))
}

// Splits idVec into groups of 100 tags and writes each group in turn.
def loopWrite(idVec, startTime, n){
    ids = idVec.cut(100)
    // Partial application fixes startTime and n; loop feeds each 100-id group.
    loop(writeData{,startTime,n}, ids)
}

login("admin", "123456")
idVec = 800000..1799999
startTime = 2019.03.22 15:43:13;
n = 350000
// Run the full write asynchronously as a background job on the server.
submitJob("submit_write", "write data", loopWrite{idVec, startTime, n})