diff --git a/opensearch.py b/opensearch.py
index 1708c8e..9cedc62 100755
--- a/opensearch.py
+++ b/opensearch.py
@@ -117,16 +117,18 @@ def scrape(output_file, input_file=None):
# map the region
location = ec2.canonicalize_location(attributes["location"])
instance_type = attributes["instanceType"]
- try:
+ if location == "Any":
+ region = "us-east-1"
+ elif location == "Asia Pacific (Osaka-Local)":
+ # at one point this region was local but was upgraded to a standard region
+ # however some SKUs still reference the old region
+ region = "ap-northeast-3"
+ regions[location] = region
+        elif location not in regions:
+ region = attributes["regionCode"]
+ regions[location] = region
+ else:
region = regions[location]
- except KeyError as e:
- if location == "Any":
- region = "us-east-1"
- else:
- print(
- f"WARNING: No region data for location={location}. Ignoring instance with sku={sku}, type={instance_type}"
- )
- continue
# set the attributes in line with the ec2 index
attributes["region"] = region
@@ -149,6 +151,7 @@ def scrape(output_file, input_file=None):
new_attributes.pop("region", None)
new_attributes.pop("usagetype", None)
new_attributes["pricing"] = attributes["pricing"]
+ new_attributes["regions"] = {}
instances[instance_type] = new_attributes
@@ -185,6 +188,15 @@ def scrape(output_file, input_file=None):
"ondemand": float(dimension["pricePerUnit"]["USD"])
}
+            # build the map of locations where each instance is available
+            # we have to do a reverse lookup from the regions dict
+            matched_location = ""
+            for loc, reg in regions.items():
+                if instance["region"] == reg:
+                    matched_location = loc
+                    break
+            instances[instance["instance_type"]]["regions"][instance["region"]] = matched_location
+
reserved_mapping = {
"1yr All Upfront": "yrTerm1.allUpfront",
"1yr Partial Upfront": "yrTerm1.partialUpfront",
diff --git a/rds.py b/rds.py
index 290f504..720240e 100644
--- a/rds.py
+++ b/rds.py
@@ -214,17 +214,6 @@ def scrape(output_file, input_file=None):
else:
region = regions[location]
- # try:
- # region = regions[location]
- # except KeyError as e:
- # if location == "Any":
- # region = "us-east-1"
- # else:
- # print(
- # f"WARNING: No region data for location={location}. Ignoring instance with sku={sku}, type={instance_type}"
- # )
- # continue
-
# set the attributes in line with the ec2 index
attributes["region"] = region
attributes["memory"] = attributes["memory"].split(" ")[0]
diff --git a/redshift.py b/redshift.py
index 006602c..283f8fe 100755
--- a/redshift.py
+++ b/redshift.py
@@ -105,16 +105,19 @@ def scrape(output_file, input_file=None):
# map the region
location = ec2.canonicalize_location(attributes["location"])
instance_type = attributes["instanceType"]
- try:
+
+ if location == "Any":
+ region = "us-east-1"
+ elif location == "Asia Pacific (Osaka-Local)":
+ # at one point this region was local but was upgraded to a standard region
+ # however some SKUs still reference the old region
+ region = "ap-northeast-3"
+ regions[location] = region
+        elif location not in regions:
+ region = attributes["regionCode"]
+ regions[location] = region
+ else:
region = regions[location]
- except KeyError as e:
- if location == "Any":
- region = "us-east-1"
- else:
- print(
- f"WARNING: No region data for location={location}. Ignoring instance with sku={sku}, type={instance_type}"
- )
- continue
# set the attributes in line with the ec2 index
attributes["region"] = region
@@ -137,6 +140,7 @@ def scrape(output_file, input_file=None):
new_attributes.pop("region", None)
new_attributes.pop("usagetype", None)
new_attributes["pricing"] = attributes["pricing"]
+ new_attributes["regions"] = {}
instances[instance_type] = new_attributes
@@ -173,6 +177,15 @@ def scrape(output_file, input_file=None):
"ondemand": float(dimension["pricePerUnit"]["USD"])
}
+            # build the map of locations where each instance is available
+            # we have to do a reverse lookup from the regions dict
+            matched_location = ""
+            for loc, reg in regions.items():
+                if instance["region"] == reg:
+                    matched_location = loc
+                    break
+            instances[instance["instance_type"]]["regions"][instance["region"]] = matched_location
+
reserved_mapping = {
"1yr All Upfront": "yrTerm1.allUpfront",
"1yr Partial Upfront": "yrTerm1.partialUpfront",
diff --git a/render.py b/render.py
index 65637fc..4c3ffce 100644
--- a/render.py
+++ b/render.py
@@ -256,11 +256,11 @@ def render(data_file, template_file, destination_file, detail_pages=True):
elif data_file == "www/opensearch/instances.json":
all_regions = regions["main"].copy()
if detail_pages:
- sitemap.extend(build_detail_pages_opensearch(instances, destination_file))
+ sitemap.extend(build_detail_pages_opensearch(instances, all_regions))
elif data_file == "www/redshift/instances.json":
all_regions = regions["main"].copy()
if detail_pages:
- sitemap.extend(build_detail_pages_redshift(instances, destination_file))
+ sitemap.extend(build_detail_pages_redshift(instances, all_regions))
generated_at = datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S UTC")
pricing_json, instance_azs_json = per_region_pricing(
@@ -300,9 +300,6 @@ def render(data_file, template_file, destination_file, detail_pages=True):
"www/cache/index.html",
)
)
- import sys
-
- sys.exit(0)
sitemap.extend(
render(
"www/redshift/instances.json",