From 740139408dae7d968966c57e13ee40afd35cacfb Mon Sep 17 00:00:00 2001
From: Ryan
Date: Wed, 13 May 2009 23:35:36 +0200
Subject: [PATCH] Fix memory leak in timer.

---
 src/http.js  | 65 ++++++++++++++++++++++++++--------------------
 src/node.cc  |  4 ++--
 src/timer.cc | 10 ++++----
 3 files changed, 39 insertions(+), 40 deletions(-)

diff --git a/src/http.js b/src/http.js
index cc7c7aa750..61ecae6b3d 100644
--- a/src/http.js
+++ b/src/http.js
@@ -47,18 +47,18 @@ var close_expression = /close/i;
 var chunk_expression = /chunk/i;
 var content_length_expression = /Content-Length/i;

-node.http.Server = function (RequestHandler, options) {
+node.http.server = function (RequestHandler, options) {
   function Protocol (connection) {
-    // An array of responses for each connection. In pipelined connections
+    // An array of messages for each connection. In pipelined connections
     // we need to keep track of the order they were sent.
-    var responses = [];
+    var messages = [];

-    function Response () {
-      responses.push(this);
+    function Message () {
+      messages.push(this);

       /* This annoying output buisness is necessary for the case that users
-       * are writing to responses out of order! HTTP requires that responses
+       * are writing to messages out of order! HTTP requires that messages
        * are returned in the same order the requests come.
        */

@@ -110,7 +110,7 @@ node.http.Server = function (RequestHandler, options) {
       };

       this.flush = function () {
-        if (responses.length > 0 && responses[0] === this)
+        if (messages.length > 0 && messages[0] === this)
           while (output.length > 0)
             connection.send(output.shift());
       };
@@ -192,34 +192,35 @@ node.http.Server = function (RequestHandler, options) {

         this.finished = true;

-        while (responses.length > 0 && responses[0].finished) {
-          var res = responses[0];
+        while (messages.length > 0 && messages[0].finished) {
+          var res = messages[0];
           res.flush();
-          responses.shift();
+          messages.shift();
         }

-        if (responses.length == 0 && connection_close) {
+        if (messages.length == 0 && connection_close) {
           connection.fullClose();
         }
       };
+
+      // abstract
+      this.onBody = function () { return true; }
+      this.onBodyComplete = function () { return true; }
     }

     this.onMessage = function ( ) {
-      var res = new Response();
-      var req = new RequestHandler(res, connection);
-
-      this.encoding = req.encoding || "raw";
+      var msg = new Message();

-      req.path = "";
-      req.uri = "";
-      req.query_string = "";
-      req.fragment = "";
-      var headers = req.headers = [];
+      msg.path = "";
+      msg.uri = "";
+      msg.query_string = "";
+      msg.fragment = "";
+      var headers = msg.headers = [];

-      this.onPath = function (data) { req.path += data; return true };
-      this.onURI = function (data) { req.uri += data; return true };
-      this.onQueryString = function (data) { req.query_string += data; return true; };
-      this.onFragment = function (data) { req.fragment += data; return true; };
+      this.onPath = function (data) { msg.path += data; return true };
+      this.onURI = function (data) { msg.uri += data; return true };
+      this.onQueryString = function (data) { msg.query_string += data; return true; };
+      this.onFragment = function (data) { msg.fragment += data; return true; };

       var last_was_value = false;

@@ -243,23 +244,21 @@ node.http.Server = function (RequestHandler, options) {
       };

       this.onHeadersComplete = function () {
-        req.http_version = this.http_version;
-        req.method = this.method;
-        res.should_keep_alive = this.should_keep_alive;
-        return req.onHeadersComplete();
+        msg.http_version = this.http_version;
+        msg.method = this.method;
+        msg.should_keep_alive = this.should_keep_alive;
+        return RequestHandler(msg);
       };

       this.onBody = function (chunk) {
-        return req.onBody(chunk);
+        return msg.onBody(chunk);
       };

       this.onBodyComplete = function () {
-        return req.onBodyComplete();
+        return msg.onBodyComplete();
       };
     };
   }

-  var server = new node.http.LowLevelServer(Protocol, options);
-  this.listen = function (port, host) { server.listen(port, host); }
-  this.close = function () { server.close(); }
+  return new node.http.LowLevelServer(Protocol, options);
 };

diff --git a/src/node.cc b/src/node.cc
index 061930f404..1a949b1fe2 100644
--- a/src/node.cc
+++ b/src/node.cc
@@ -50,8 +50,8 @@ ObjectWrap::Attach ()
 void
 ObjectWrap::Detach ()
 {
-  attach_count_ -= 1;
-  assert(attach_count_ >= 0);
+  if (attach_count_ > 0)
+    attach_count_ -= 1;

   if(weak_ && attach_count_ == 0)
     delete this;
diff --git a/src/timer.cc b/src/timer.cc
index 518259559d..6cbb96c23e 100644
--- a/src/timer.cc
+++ b/src/timer.cc
@@ -5,6 +5,8 @@
 using namespace v8;
 using namespace node;

+#define CALLBACK_SYMBOL String::NewSymbol("callback")
+
 void
 Timer::Initialize (Handle<Object> target)
 {
@@ -25,7 +27,7 @@ Timer::OnTimeout (EV_P_ ev_timer *watcher, int revents)

   HandleScope scope;

-  Local<Value> callback_value = timer->handle_->Get(String::NewSymbol("callback"));
+  Local<Value> callback_value = timer->handle_->GetHiddenValue(CALLBACK_SYMBOL);
   if (!callback_value->IsFunction())
     return;

@@ -40,7 +42,7 @@ Timer::OnTimeout (EV_P_ ev_timer *watcher, int revents)
    * it's rather crutial for memory leaks the conditional here test to see
    * if the watcher will make another callback.
    */
-  if (false == ev_is_active(&timer->watcher_))
+  if (!ev_is_active(&timer->watcher_))
     timer->Detach();
 }

@@ -49,13 +51,11 @@ Timer::Timer (Handle<Object> handle, Handle<Function> callback, ev_tstamp after,
 {
   HandleScope scope;

-  handle_->Set(String::NewSymbol("callback"), callback);
+  handle_->SetHiddenValue(CALLBACK_SYMBOL, callback);

   ev_timer_init(&watcher_, Timer::OnTimeout, after, repeat);
   watcher_.data = this;

-  ev_timer_start(EV_DEFAULT_UC_ &watcher_);
-  Attach();
 }

 Timer::~Timer ()