Azure Cosmos DB stored procedure response size too large


Problem description

I have IoT sensor data that is being stored in Cosmos DB. I need minute-level aggregation of the data for the last 30 days to show in a web app, so I have written a stored procedure that does the group-by and aggregation of the sensor data.

I get the following error:

Failed to execute stored procedure something for collection newData: {"code":400,"body":"{\"code\":\"BadRequest\",\"message\":\"Message: {\\"Errors\\":[\\"Encountered exception while executing function. Exception = Error: Resulting message would be too large because of \\\\"x-ms-documentdb-script-log-results\\\\". Return from script with current message and use continuation token to call the script again or modify your script.\\r\\nStack trace: Error: Resulting message would be too large because of \\\\"x-ms-documentdb-script-log-results\\\\". Return from script with current message and use continuation token to call the script again or modify your script.\\n at validateSize (sa\\"]}\r\nActivityId: 46713736-fe18-4fd1-8df1-49fa615c7289, Request URI: /apps/35edbe01-33d4-4189-9959-240fe985a75e/services/f3955cd0-044e-4a48-ad24-ae51a24c13b8/partitions/784e5371-950b-476d-a765-52ddb784f8dd/replicas/131850819031922581p/, RequestStats: \r\nRequestStartTime: 2018-11-06T21:08:06.7386246Z, Number of regions attempted: 1\r\n, SDK: Microsoft.Azure.Documents.Common/2.1.0.0\"}","activityId":"46713736-fe18-4fd1-8df1-49fa615c7289","substatus":413}
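
The error itself suggests a workaround: return early from the script with the results gathered so far plus a continuation token, and call the procedure again. Below is a minimal sketch of that pattern; the function and parameter names are illustrative, not from the procedure further down, and `pageSize`/`continuation` are assumed to be the paging options accepted by the server-side queryDocuments call.

function aggregateWithContinuation(startTs, endTs, continuationToken) {
    var collection = getContext().getCollection();
    var response = getContext().getResponse();

    // Query one page at a time instead of the whole 30-day window at once.
    var accepted = collection.queryDocuments(
        collection.getSelfLink(),
        'SELECT r._ts FROM root r WHERE r._ts BETWEEN ' + startTs + ' AND ' + endTs,
        { pageSize: 1000, continuation: continuationToken },
        function (err, feed, options) {
            if (err) throw err;
            // Return this page together with the token; the caller keeps
            // re-invoking the procedure until options.continuation is empty.
            response.setBody({ items: feed || [], continuation: options.continuation });
        });

    if (!accepted) throw new Error('Query was not accepted; retry with the last continuation token.');
}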

Because the sensors produce 1-second-level data from which I need 1-minute aggregates, I cannot use a Stream Analytics job: going forward I don't know which sensor names to put inside avg().

The only option left for me is to run the stored procedure and get back the 1-minute aggregates.

I have used JavaScript for the stored procedure, and in my API calls I am using Java Spring Boot. Please give me any suggestion on how I can get around this Cosmos DB limitation, or how I can store the 1-minute aggregates in Cosmos DB so that I can retrieve those records.
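
One possible way to sidestep the response-size limit, assuming pre-computed documents are acceptable, is to have a stored procedure write each 1-minute aggregate back into the collection as its own small document and return only a count, then query those small documents from Spring Boot. The following is a rough sketch along the lines of the documented bulk-import pattern; the `minuteAggregate` document shape and all names are illustrative, not from the original question.

function saveMinuteAggregates(aggregates) {
    var collection = getContext().getCollection();
    var collectionLink = collection.getSelfLink();
    var count = 0;

    if (!aggregates || !aggregates.length) {
        getContext().getResponse().setBody(0);
        return;
    }

    // Write the aggregates one after another; each callback triggers the next
    // write, and the response body is just the number of documents created.
    createNext();

    function createNext() {
        var agg = aggregates[count];
        var accepted = collection.createDocument(
            collectionLink,
            { docType: 'minuteAggregate', minute: agg.minute, sensor: agg.sensor, avg: agg.avg },
            function (err) {
                if (err) throw err;
                count = count + 1;
                if (count < aggregates.length) {
                    createNext();
                } else {
                    getContext().getResponse().setBody(count);
                }
            });

        // If the write is not accepted, report how far we got so the caller
        // can re-run the procedure with the remaining aggregates.
        if (!accepted) {
            getContext().getResponse().setBody(count);
        }
    }
}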

Below is my procedure. I pass it three string parameters, for example "pressure,Temp,volume", "123444", "345552".

function something(variable1, variable2, variable3) {

// variable1 is a comma-separated list of sensor names, e.g. "pressure,Temp,volume";
// variable2 / variable3 are the start and end _ts (epoch seconds) of the window.
var variables = variable1.split(",");   // copy that gets the 'r.' prefix for the SELECT list
var parameters = variable1.split(",");  // raw sensor names, used as keys while aggregating
var variablestrings = '';
var dataPoints = {}, totals = {};
var results = [], values = [];
var value, k, l, p, i;

var collection = getContext().getCollection();

// Build one result series per sensor and the projection list for the query.
for (i = 0; i < variables.length; i = i + 1) {
    results[i] = {};
    results[i].variableName = variables[i];
    results[i].data = [];
    variables[i] = 'r.' + variables[i];
}

variablestrings = variables.toString() + ' , r._ts ';

var queryString = 'SELECT ' + variablestrings + ' FROM root r WHERE r._ts BETWEEN ' + variable2 + ' AND ' + variable3;

// Query all documents in the requested time window and aggregate them per minute.
var isAccepted = collection.queryDocuments(
    collection.getSelfLink(),
    queryString,
function (err, feed, options) {
   // console.log(feed[0]._ts);
    if (err) throw err;

    // If the feed is empty, set the body to 'no docs found';
    // otherwise bucket the documents per minute and aggregate them.
    if (!feed || !feed.length) {
        var response = getContext().getResponse();
        response.setBody('no docs found');
    }
    else {
            //console.log(feed.length);
            // Bucket each document by its minute-level timestamp.
            feed.forEach((item) => {
                var d1 = new Date(item._ts * 1000);   // _ts is in epoch seconds
                item['timeInMinutes'] = d1.getUTCFullYear() + '-' + (d1.getUTCMonth() + 1) + '-' + d1.getUTCDate()
                    + ' ' + d1.getUTCHours() + ':' + d1.getUTCMinutes() + ':00';
                //console.log(JSON.stringify(item));

                if (!dataPoints[item['timeInMinutes']]) {
                    dataPoints[item['timeInMinutes']] = [];
                }
                dataPoints[item['timeInMinutes']].push(item);
            });

            // For each minute bucket, average every requested sensor across the bucket.
            values = Object.values(dataPoints);
            for (k = 0; k < values.length; k = k + 1) {
                value = values[k];

                // Reset the running totals for this minute.
                for (l = 0; l < parameters.length; l = l + 1) {
                    totals[parameters[l]] = 0;
                }

                for (p = 0; p < parameters.length; p = p + 1) {
                    for (i = 0; i < value.length; i = i + 1) {
                        totals[parameters[p]] = value[i][parameters[p]] + totals[parameters[p]];
                    }

                    results[p].data.push({
                        'x-axis': value[0]['timeInMinutes'],
                        'y-axis': totals[parameters[p]] / value.length
                    });
                }
            }
            //console.log(results[0]);
            var response = getContext().getResponse();        
            response.setBody(results);


    }
});

if (!isAccepted) throw new Error('The query was not accepted by the server.');

}

Answer

Based on @Chris Anderson-MSFT's suggestion in the comments on the OP's question:

In my case, my stored procedure written in JavaScript had a few logging statements. Simply removing the console.log calls worked for me.
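
If the log output still needs to be available occasionally, a minimal alternative is to gate it behind an optional flag, since everything written via console.log is shipped back in the x-ms-documentdb-script-log-results header and counts toward the response size. The `debug` parameter and `log` helper below are illustrative, not part of the original procedure.

function something(variable1, variable2, variable3, debug) {
    // Only produce console.log output when explicitly requested, because the
    // log text is returned in the x-ms-documentdb-script-log-results header
    // and its size counts toward the response limit.
    function log(msg) {
        if (debug) {
            console.log(msg);
        }
    }

    // ... same body as the procedure above, but calling log(...) instead of console.log(...)
}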
