A few days ago, the greatest API on the entire Internet was released - the Marvel API. Ok, maybe greatest is a strong word, but I love APIs, I love comics, and the combination of the two is nothing less than Galactus-level news. (And by Galactus I mean the giant purple guy, not the amorphous giant cloud from the forgettable Fantastic Four movie.)

The API supports getting data on characters, comics, creators, events, series, and stories. You can try out their interactive docs for details. You will want to sign up for a key first though so you can actually see results. The docs are well done, but currently have a silly CSS bug that prevents you from copying text from them. (You can work around this via DevTools - if you don't know how to do that, just leave me a comment.) The API doesn't support text-based searches yet, so if you want to find all the "Spider" characters you're out of luck. (But this has been requested as well.)

Outside of that, the API is pretty powerful, with a lot of options. You can use it in both client-side and server-side applications with a very simple GET request. The API does have a pretty low limit (imo) of 1,000 calls per day. Multiple people have asked for a higher limit and the Marvel folks have said they would consider it. They wanted to launch with a lower limit just to be careful, and I can understand that. Another oddity is that they don't have a proper forum for API discussions yet; instead they have one page with comments. That's not going to scale well, and I hope they switch to something else soon, as it is already getting a bit messy. (Heck, even a simple Google Group would be cool.)
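To give you an idea of just how simple that GET can be, here's a minimal client-side sketch using jQuery. YOUR_PUBLIC_KEY is a placeholder for a real Marvel public key, and as I understand it, client-side calls authenticate by whitelisting your site's referrer in the developer portal (server-side calls need a hash as well - more on that in my second demo).

//a minimal sketch of a client-side call - YOUR_PUBLIC_KEY is a placeholder
$.get("http://gateway.marvel.com/v1/public/characters", {
	apikey: "YOUR_PUBLIC_KEY",
	limit: 10
}).done(function(res) {
	//res.data.results holds the matching records
	console.log(res.data.results.length + " characters returned");
});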

I worked on a simple demo using the Comics API. The API lets you fetch comic data and apply multiple filters, so you can ask for collections versus single comics, request comics within a certain date range, or even look for a particular character. The result data for an individual comic is very detailed.
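To give you a sense of that data, here is a trimmed, hand-edited sketch of a single comic record showing only the fields my demos touch - the real response carries far more (creators, characters, descriptions, and so on):

{
	"title": "...",
	"pageCount": 32,
	"prices": [ { "type": "printPrice", "price": 3.99 } ],
	"dates": [ { "type": "onsaleDate", "date": "2013-05-01T00:00:00-0400" } ],
	"thumbnail": { "path": "http://i.annihil.us/u/prod/marvel/...", "extension": "jpg" }
}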

For my first demo I thought it would be interesting to do a date comparison. I wrote a demo that would fetch 100 comics for each year, figure out the average price and page count, and display five random cover images. I was curious to see how prices and sizes had changed over the years. Let's look at the code. First, my HTML.

<!DOCTYPE html>
<html>
	<head>
		<meta charset="utf-8">
		<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
		<title></title>
		<meta name="description" content="">
		<meta name="viewport" content="width=device-width">
		<script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/1/jquery.min.js"></script>
		<script src="handlebars-v1.3.0.js"></script>
		<link rel="stylesheet" href="style.css">
	</head>
	<body>

	<div id="results"></div>
	<div id="status"></div>

	<script id="reportTemplate" type="text/x-handlebars-template">
		<h1>{{year}}</h1>
		<p>
		Average Price: ${{avgPrice}}<br/>
		Low/High Price: ${{minPrice}} / ${{maxPrice}}<br/>
		Average Page Count: {{avgPageCount}}<br/>
		</p>
		{{#each thumbs}}
		<img src="{{this}}" class="thumb">
		{{/each}}
		<br clear="left">
		<p/>
	</script>
	<script src="app.js"></script>

	</body>
</html>

There isn't much here but a Handlebars template to handle the results. Let's look at the JavaScript now.

/* global $,console,document,Handlebars */

//default not avail image
var IMAGE_NOT_AVAIL = "http://i.annihil.us/u/prod/marvel/i/mg/b/40/image_not_available";

//my key
var KEY = "mykeyisbetterthanyours";

//credit: http://stackoverflow.com/a/1527820/52160
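//returns a random integer between min and max, inclusive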
function getRandomInt (min, max) {
	return Math.floor(Math.random() * (max - min + 1)) + min;
}
		
function getComicData(year) {
	var url = "http://gateway.marvel.com/v1/public/comics?limit=100&format=comic&formatType=comic&dateRange="+year+"-01-01%2C"+year+"-12-31&apikey="+KEY;
	console.log('getComicData('+year+')');
	return $.get(url);
}
		
$(document).ready(function() {
	
	var $results = $("#results");
	var $status = $("#status");
	
	var templateSource = $("#reportTemplate").html();
	var template = Handlebars.compile(templateSource);
	var start = 2013;
	var end = 1950;
	
	var promises = [];
	
	$status.html("<i>Getting comic book data - this will be slow - stand by...</i>");
	
	for(var x=start; x>=end; x--) {
		promises.push(getComicData(x));
	}
	
	$.when.apply($,promises).done(function() {
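		//$.when won't take an array directly, hence the apply; each request's
		//response arrives here as a separate [data, statusText, jqXHR] argument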

		var args = Array.prototype.slice.call(arguments, 0);

		$status.html("");
		
		for(var x=0; x<args.length; x++) {
			var year = start-x;
			console.log("displaying year", year);	

			var stats = {};
			stats.year = year;
			stats.priceTotal = 0;
			stats.priceCount = 0;
			stats.minPrice = 999999999;
			stats.maxPrice = -999999999;
			stats.pageTotal = 0;
			stats.pageCount = 0;
			stats.pics = [];
			
			var res = args[x][0];
			
			if(res.code === 200) {
				for(var i=0;i<res.data.results.length;i++) {
					var comic = res.data.results[i];
					//just get the first item
					if(comic.prices.length && comic.prices[0].price !== 0) {
						stats.priceTotal += comic.prices[0].price;
						if(comic.prices[0].price > stats.maxPrice) stats.maxPrice = comic.prices[0].price;
						if(comic.prices[0].price < stats.minPrice) stats.minPrice = comic.prices[0].price;
						stats.priceCount++;
					}
					if(comic.pageCount > 0) {
						stats.pageTotal+=comic.pageCount;
						stats.pageCount++;
					}
					if(comic.thumbnail && comic.thumbnail.path != IMAGE_NOT_AVAIL) stats.pics.push(comic.thumbnail.path + "." + comic.thumbnail.extension);
					
				}
				stats.avgPrice = (stats.priceTotal/stats.priceCount).toFixed(2);
				stats.avgPageCount = (stats.pageTotal/stats.pageCount).toFixed(2);
				
				//pick 5 thumbnails at random
				stats.thumbs = [];
				while(stats.pics.length > 0 && stats.thumbs.length < 5) {
					var chosen = getRandomInt(0, stats.pics.length-1);
					stats.thumbs.push(stats.pics[chosen]);
					stats.pics.splice(chosen, 1);
				}
				
				console.dir(stats);
				var html = template(stats);
				$results.append(html);
			}
		}
	});
	
});

There isn't a lot to this. I basically loop over a set of years and fire off async requests to get the data. For each year I figure out my averages, collect images, and pick out 5 random ones. Finally the results are printed to screen. This app is slow, as I wait for all 64 requests to finish before I render. A better demo would render results as they came in and handle displaying them in the proper order - something like the sketch below.
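Here's one way that could work: append a placeholder div per year up front so the page order is fixed, then fill each slot in as its request resolves. (This is just a sketch - buildStats is a hypothetical helper wrapping the stats-gathering logic from above.)

//render-as-they-come sketch: buildStats is a hypothetical helper
//wrapping the stats-gathering logic from the loop above
for(var x=start; x>=end; x--) {
	(function(year) {
		//reserve a slot up front so years display in order,
		//no matter which request finishes first
		var $slot = $("<div>").appendTo($results);
		getComicData(year).done(function(res) {
			if(res.code === 200) $slot.html(template(buildStats(year, res)));
		});
	})(x);
}

Anyway - the result was... fascinating.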

I kinda knew prices would go up over time so that wasn't surprising. In 2013 my data shows an average of $4.12 compared to ten cents in 1950. Page count is a bit lower, but not dramatically so. What was really epic was the covers. I mean, I knew styles had changed over time, but to see it all at once was awesome! For example, here are the ones I got for 2013.

Now go back to 1985.

And finally - 1960.

Because of the API limit I can't share the live application, but I did take the rendered output and save that. If you're curious, you can grab the dynamically generated HTML by opening up the console and running $("body").html(). You can view the static report here: https://static.raymondcamden.com/demos/2014/jan/31/report.html

So - I realized that the coolest part of that last demo was the covers. So I built a second demo focused just on that. I created a Node.js/Express application that did one thing: pick a random year, pick a random month, and pick a random cover. It then displayed this to the user along with the title/publication date on the lower left side. Because it was server-side, I was able to use caching. I used a range of 1960 to 2013, which works out to 648 different year/month combinations (54 years times 12 months). In theory, I should be able to run the application and never hit my limit. I also built in code to handle cases where the API limits are hit anyway: if I have more than 5 months cached, I'll just use the existing cache. I'll share the entire code base, but here is the marvel.js module the app uses to return the cover.

/* global require,exports, console */
var http = require('http');
var crypto = require('crypto');

var cache = {};
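//keys are "year_month" strings; values are {hits, images} records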

var PRIV_KEY = "iamthegatekeeper";
var API_KEY = "iamthekeymaster";

//default not avail image
var IMAGE_NOT_AVAIL = "http://i.annihil.us/u/prod/marvel/i/mg/b/40/image_not_available";

exports.getCache = function() { return cache; };

function getRandomInt (min, max) {
	return Math.floor(Math.random() * (max - min + 1)) + min;
}

//count the keys in an object (a plain helper rather than extending Object)
function objectSize(obj) {
    var size = 0, key;
    for (key in obj) {
        if (obj.hasOwnProperty(key)) size++;
    }
    return size;
}

function getCover(cb) {
	//first select a random year
	var year = getRandomInt(1960, 2013);
	//then a month
	var month = getRandomInt(1,12);

	var cache_key = year + "_" + month;
	
	if(cache_key in cache) {
		console.log('had cache for '+cache_key);
		var images = cache[cache_key].images;
		cache[cache_key].hits++;
		cb(images[getRandomInt(0, images.length-1)]);		
	} else {
		var monthStr = month<10?"0"+month:month;
		//compute the real last day of the month: month here is 1-based,
		//so in JS Date terms this is day 0 of the following month
		var eom = new Date(year, month, 0).getDate();
		var beginDateStr = year + "-" + monthStr + "-01";
		var endDateStr = year + "-" + monthStr + "-" + eom;
		var url = "http://gateway.marvel.com/v1/public/comics?limit=100&format=comic&formatType=comic&dateRange="+beginDateStr+"%2C"+endDateStr+"&apikey="+API_KEY;
		var ts = new Date().getTime();
		var hash = crypto.createHash('md5').update(ts + PRIV_KEY + API_KEY).digest('hex');
		url += "&ts="+ts+"&hash="+hash;
		//TEMP
		//var url ="http://127.0.0.1/testingzone/html5tests/marvel/resp.json";
		
		console.log(new Date()+' '+url);
		
		http.get(url, function(res) {
			var body = "";

			res.on('data', function (chunk) {
				body += chunk;
			});
			
			res.on('end', function() {
				//result.success = true;

				var result = JSON.parse(body);
				var images;
				
				if(result.code === 200) {
					images = [];
					console.log('num of comics '+result.data.results.length);
					for(var i=0;i<result.data.results.length;i++) {
						var comic = result.data.results[i];
						//console.dir(comic);
						if(comic.thumbnail && comic.thumbnail.path != IMAGE_NOT_AVAIL) {
							var image = {};
							image.title = comic.title;
							for(var x=0; x<comic.dates.length;x++) {
								if(comic.dates[x].type === 'onsaleDate') {
									image.date = new Date(comic.dates[x].date);
								}
							}
							image.url = comic.thumbnail.path + "." + comic.thumbnail.extension;
							images.push(image);
						}
					}
					//console.dir(images);
					//now cache it
					cache[cache_key] = {hits:1};
					cache[cache_key].images = images;
					cb(images[getRandomInt(0, images.length-1)]);
				} else if(result.code === "RequestThrottled") {
					console.log("RequestThrottled Error");
					/*
					So don't just fail. If we have a good cache, just grab from there
					*/
					if(objectSize(cache) > 5) {
						var keys = [];
						for(var k in cache) keys.push(k);
						var randomCacheKey = keys[getRandomInt(0,keys.length-1)];
						images = cache[randomCacheKey].images;
						cache[randomCacheKey].hits++;
						cb(images[getRandomInt(0, images.length-1)]);		
					} else {
						cb({error:result.code});
					}
				} else {
					console.log(new Date() + ' Error: '+JSON.stringify(result));
					cb({error:result.code});
				}
				//console.log(data);
			});
		
		});
	}

}

exports.getCover = getCover;
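
To show how the module gets used, here's a minimal sketch of the kind of Express route that could sit in front of it. (The route path and port here are my own placeholders, not necessarily what the live app uses.)

//sketch only: route path and port are placeholders
var express = require('express');
var marvel = require('./marvel');
var app = express();

app.get('/cover', function(req, res) {
	marvel.getCover(function(result) {
		//result is either {title, date, url} or {error: code}
		res.json(result);
	});
});

app.listen(3000);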

Here is a screenshot:

You can view this yourself here: marvel.raymondcamden.com. Note that I am not displaying the "Data by Marvel" attribution label yet and I need to add that to comply with Marvel's API rules. (Which is totally fair - I just haven't wanted to restart the server yet!)

I know I'm biased, but I love my demo. The first time I watched it I saw titles I had never heard of. I don't know that I'd consider myself a "serious" comic collector - I just buy what I like - but seeing the depth of history in the Marvel line is actually encouraging me to pick up more comics from the past. I've included the full source code for both my demos as an attachment to this blog post. (I've removed the keys, obviously.)

Download attached file.