I have created an ASP.net application with several aspx. Some of them are used like an API link. These API pages are consumed by J2ME application which passes some headers. I am checking these headers specifically "Accept-Encoding" so that my page could send compressed contents.
The following code shows a sample code from one of those API pages.
byte[] buffer;
int bufferLength;
protected void Page_Load(object sender, EventArgs e)
{
bufferLength = Request.ContentLength;
buffer = new byte[bufferLength];
buffer = Request.BinaryRead(bufferLength);
string s = Encoding.ASCII.GetString(buffer);
bool needEncrypted = (Request.Headers["Accept-Encoding"] != null);
if (!string.IsNullOrEmpty(s))
{
JavaScriptSerializer ser = new JavaScriptSerializer();
CRequestQuestParameter QuestPara = ser.Deserialize<CRequestQuestParameter>(s);
CUtil utilFun = new CUtil(WebConfigurationManager.ConnectionStrings["TheConnectionString"].ToString());
CResponseQuestionSets res = new CResponseQuestionSets();
//No error setting
res.ErrorCode = -1;
res.ErrorMessage = "";
res.IsError = false;
//Return data
res.ResponseData = utilFun.GetQuestionSet(QuestPara);
if (res.ResponseData != null) {
res.ErrorCode = -1;
res.ErrorMessage = "";
res.IsError = false;
}
else
{
res.ErrorCode = 102;
res.ErrorMessage = "User does not exists";
res.IsError = true;
}
if (needEncrypted)
{
HttpContext context = HttpContext.Current;
context.Response.Filter = new GZipStream(context.Response.Filter, CompressionMode.Compress);
HttpContext.Current.Response.AppendHeader("Content-encoding", "gzip");
}
Response.ContentType = "application/json charset=utf-8";
Response.Write(ser.Serialize(res));
Response.End();
}
}
The problem here is that the J2ME application is not receiving the data as compressed but does gets the header "Content-Encoding".
Can anybody help me out solve this issue?