robots.txt emulator
Blocks URLs based on robots.txt, applying each site's crawler rules to your own browsing.
manifest.json
{
  "update_url": "https://clients2.google.com/service/update2/crx",
  "icons": {
    "16": "icons/icon16.png",
    "32": "icons/icon32.png",
    "48": "icons/icon48.png",
    "64": "icons/icon64.png",
    "128": "icons/icon128.png"
  },
  "background": {
    "scripts": [
      "js/background.js"
    ],
    "persistent": true
  },
  "options_page": "options.html",
  "browser_action": {
    "default_icon": {
      "16": "icons/icon16.png",
      "24": "icons/icon24.png",
      "32": "icons/icon32.png"
    },
    "default_title": "robots.txt emulator",
    "default_popup": "popup.html"
  },
  "description": "blocks urls based on robots.txt",
  "manifest_version": 2,
  "name": "robots.txt emulator",
  "permissions": [
    "webRequest",
    "webRequestBlocking",
    "storage",
    "tabs",
    "<all_urls>"
  ],
  "version": "1.2"
}
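
js/background.js (sketch)

The extension's real js/background.js is not shown above, so the following is a minimal sketch of how the webRequest and webRequestBlocking permissions declared in the manifest could be used to block requests that a site's robots.txt disallows. The parsing (a plain "User-agent: *" section reader), the in-memory cache, and all function names here are illustrative assumptions, not the extension's actual code.

// Sketch only: assumes a simple "User-agent: *" reading of robots.txt.
// In-memory cache: origin -> array of disallowed path prefixes.
const rulesByOrigin = {};

// Collect the Disallow prefixes that apply to "User-agent: *".
function parseRobotsTxt(text) {
  const disallowed = [];
  let appliesToUs = false;
  for (const rawLine of text.split("\n")) {
    const line = rawLine.split("#")[0].trim(); // strip comments
    const match = line.match(/^([A-Za-z-]+)\s*:\s*(.*)$/);
    if (!match) continue;
    const field = match[1].toLowerCase();
    const value = match[2].trim();
    if (field === "user-agent") {
      appliesToUs = value === "*";
    } else if (field === "disallow" && appliesToUs && value !== "") {
      disallowed.push(value);
    }
  }
  return disallowed;
}

// Fetch and cache robots.txt for an origin. The cache entry is created
// (empty) before the fetch so concurrent requests don't refetch, and so
// the robots.txt request itself passes through the listener unblocked.
function loadRules(origin) {
  rulesByOrigin[origin] = [];
  fetch(origin + "/robots.txt")
    .then((resp) => (resp.ok ? resp.text() : ""))
    .then((text) => {
      rulesByOrigin[origin] = parseRobotsTxt(text);
    })
    .catch(() => {}); // on network error, leave the origin unrestricted
}

// Blocking listener: Manifest V2 requires a synchronous answer, so the
// first request to an unseen origin is allowed while its robots.txt
// loads; later requests are checked against the cached rules.
chrome.webRequest.onBeforeRequest.addListener(
  (details) => {
    const url = new URL(details.url);
    const rules = rulesByOrigin[url.origin];
    if (rules === undefined) {
      loadRules(url.origin);
      return {};
    }
    const path = url.pathname + url.search;
    const blocked = rules.some((prefix) => path.startsWith(prefix));
    return blocked ? { cancel: true } : {};
  },
  { urls: ["<all_urls>"] },
  ["blocking"]
);

Because blocking listeners in Manifest V2 cannot wait on asynchronous work, a cache-then-check approach like this is the usual pattern: for example, once https://example.com/robots.txt with "User-agent: *" and "Disallow: /private/" has been cached, a later request to https://example.com/private/page would be cancelled.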