I have some code to scale and translate a map in D3, but the performance is quite terrible: zooming and panning take nearly 3 seconds per refresh. I thought the map would look nicer with line boundaries for all the counties included, but at 6 MB+ of GeoJSON I suspect that's where the bottleneck is. Is there another way I should be handling the transforms (I've sketched what I mean at the bottom of the post), or maybe a way to optimize the map data? Or is D3 just not suited to this level of detail? I'm very new to D3.
I'm using shapefiles from here, converted from DBF to GeoJSON using QGIS: https://www.census.gov/cgi-bin/geo/shapefiles2010/main
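One idea I had for optimizing the map data is converting the GeoJSON to TopoJSON, which I gather produces a much smaller file, but I haven't actually tried it. If I understand it right, the loading part of my code would change to something like the sketch below (the file name, the "counties" object name, and the topojson script URL are all guesses on my part, so please correct me if this is off):

// Untested sketch: load TopoJSON instead of raw GeoJSON.
// Assumes <script src="http://d3js.org/topojson.v1.min.js"></script> is also included,
// and that the converted file exposes an object named "counties" (a guess).
d3.json("cali.topojson", function (data)
{
    var features = topojson.feature(data, data.objects.counties).features;
    hotbox.append("g")
        .attr("id", "geometry")
        .selectAll("path")
        .data(features)
        .enter()
        .append("path")
        .attr("d", path)
        .attr("fill", "steelblue")
        .on("click", onClick);
});

Anyway, here is my current code: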
<!doctype html>
<html>
<head>
<title>d3 map</title>
<script src="http://d3js.org/d3.v3.min.js">
</script>
</head>
<body>
<script>
var width = 800;
var height = 600;

// Mercator projection and a path generator bound to it
var projection = d3.geo.mercator();
var path = d3.geo.path().projection(projection);

var canvas = d3.select("body")
    .append("svg")
    .attr("width", width)
    .attr("height", height);

// Keep the zoom behaviour in sync with the projection's translate/scale
var zoomVar = d3.behavior.zoom()
    .translate(projection.translate())
    .scale(projection.scale())
    .scaleExtent([height, 60 * height])
    .on("zoom", onPostZoom);

var hotbox = canvas.append("g").call(zoomVar);

// White background rect so the group has a surface to catch zoom/pan events
hotbox.append("rect")
    .attr("class", "background")
    .attr("width", width)
    .attr("fill", "white")
    .attr("height", height);

d3.json("cali.geojson", function (data)
{
    // One <path> per county feature
    hotbox.append("g")
        .attr("id", "geometry")
        .selectAll("path")
        .data(data.features)
        .enter()
        .append("path")
        .attr("d", path)
        .attr("fill", "steelblue")
        .on("click", onClick);
});

// Centre the clicked feature, then redraw every path
function onClick (d)
{
    var centroid = path.centroid(d),
        translate = projection.translate();
    projection.translate(
        [translate[0] - centroid[0] + width / 2,
         translate[1] - centroid[1] + height / 2]);
    zoomVar.translate(projection.translate());
    hotbox.selectAll("path").transition()
        .duration(700)
        .attr("d", path);
}

// On every zoom/pan event: update the projection, then recompute every path
function onPostZoom()
{
    projection.translate(d3.event.translate).scale(d3.event.scale);
    hotbox.selectAll("path").attr("d", path);
}
</script>
</body>
</html>
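For reference, here's what I mean by "another way of handling the transforms": instead of updating the projection and recomputing every path inside onPostZoom, apply a single SVG transform to the geometry group, so the path data is only generated once when the file loads. This is an untested sketch against my code above. I'm guessing the zoom behaviour would then start at scale 1 with a plain screen-space scaleExtent, and I realise the click-to-centre handler and stroke widths would need rethinking too:

// Untested sketch: geometric zooming, i.e. pan/scale the <g> and leave the projection alone.
var zoomVar = d3.behavior.zoom()
    .scaleExtent([1, 60])   // screen-space scale factors (guessed bounds)
    .on("zoom", onPostZoom);

function onPostZoom()
{
    // d3.event.translate is [x, y]; joining it into the transform string is the usual pattern
    hotbox.select("#geometry")
        .attr("transform",
              "translate(" + d3.event.translate + ")scale(" + d3.event.scale + ")");
}

Is that the right direction, or is there something better?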