author     Sven Eckelmann <sven@narfation.org>  2016-07-15 17:39:27 +0200
committer  Simon Wunderlich <sw@simonwunderlich.de>  2016-08-09 07:54:49 +0200
commit     f665fa7e85e9e32deadc8aee35c958334c812d8d (patch)
tree       215ce270a5ec0db25d8d22b59f0c60bcee5a91bf /net/batman-adv
parent     6a51e09d8b5828698217ac4a04b97de1e5415978 (diff)
batman-adv: Place kref_get for gw_node near use
It is hard to understand why the refcnt is increased when it isn't done near the actual place where the new reference is used. Using kref_get right before the place that requires the reference, and in the same function, helps to avoid accidental problems caused by incorrect reference counting.

Signed-off-by: Sven Eckelmann <sven@narfation.org>
Signed-off-by: Marek Lindner <mareklindner@neomailbox.ch>
Signed-off-by: Simon Wunderlich <sw@simonwunderlich.de>
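The pattern the patch establishes is: create the object with one reference owned by the creator (kref_init), take a second reference on the line right next to the statement that publishes the pointer (the list insertion), and have the creator drop its own reference before returning. Below is a minimal, single-threaded userspace sketch of that flow; the struct, the plain C11 atomics, and names like gw_node_add()/gw_node_put() are illustrative stand-ins rather than the kernel's kref API, and the spinlock the real code holds around the list insertion is omitted.

/* gw_node_refcount_sketch.c: illustrative stand-in, not kernel code */
#include <stdatomic.h>
#include <stdio.h>
#include <stdlib.h>

struct gw_node {                      /* stand-in for struct batadv_gw_node */
	struct gw_node *next;         /* stand-in for the hlist linkage */
	atomic_int refcount;
};

static struct gw_node *gw_list;       /* stand-in for bat_priv->gw.list */

static void gw_node_put(struct gw_node *gw_node)
{
	/* drop one reference; free when the last one is gone */
	if (atomic_fetch_sub(&gw_node->refcount, 1) == 1)
		free(gw_node);
}

static void gw_node_add(void)
{
	struct gw_node *gw_node = calloc(1, sizeof(*gw_node));

	if (!gw_node)
		return;

	/* creator's reference, as kref_init() provides */
	atomic_init(&gw_node->refcount, 1);

	/* take the list's reference right next to the insertion */
	atomic_fetch_add(&gw_node->refcount, 1);
	gw_node->next = gw_list;
	gw_list = gw_node;

	/* don't return a reference to the new gw_node */
	gw_node_put(gw_node);
}

int main(void)
{
	gw_node_add();
	if (gw_list)
		printf("references held: %d\n",
		       atomic_load(&gw_list->refcount));  /* prints 1: the list's */
	return 0;
}

The payoff is local reasoning: a reader of the insertion sees the get on the adjacent line, and an auditor of the function can pair the creator's init with the final put without scanning other call sites.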
Diffstat (limited to 'net/batman-adv')
-rw-r--r--  net/batman-adv/gateway_client.c  6
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/net/batman-adv/gateway_client.c b/net/batman-adv/gateway_client.c
index b889e1fdba4d..4b51b1cf4f76 100644
--- a/net/batman-adv/gateway_client.c
+++ b/net/batman-adv/gateway_client.c
@@ -339,14 +339,15 @@ static void batadv_gw_node_add(struct batadv_priv *bat_priv,
 	if (!gw_node)
 		return;
 
+	kref_init(&gw_node->refcount);
 	INIT_HLIST_NODE(&gw_node->list);
 	kref_get(&orig_node->refcount);
 	gw_node->orig_node = orig_node;
 	gw_node->bandwidth_down = ntohl(gateway->bandwidth_down);
 	gw_node->bandwidth_up = ntohl(gateway->bandwidth_up);
-	kref_init(&gw_node->refcount);
 
 	spin_lock_bh(&bat_priv->gw.list_lock);
+	kref_get(&gw_node->refcount);
 	hlist_add_head_rcu(&gw_node->list, &bat_priv->gw.list);
 	spin_unlock_bh(&bat_priv->gw.list_lock);
 
@@ -357,6 +358,9 @@ static void batadv_gw_node_add(struct batadv_priv *bat_priv,
 		   ntohl(gateway->bandwidth_down) % 10,
 		   ntohl(gateway->bandwidth_up) / 10,
 		   ntohl(gateway->bandwidth_up) % 10);
+
+	/* don't return reference to new gw_node */
+	batadv_gw_node_put(gw_node);
 }
 
 /**